Example #1
 public void SetDesktopVolume(float vol)
 {
     if (audAsyncDec != IntPtr.Zero)
     {
         MixCastAV.setDesktopVolume(audAsyncDec, vol);
     }
 }
Example #2
        protected virtual void StopEncoderAsync(System.Object encoderLock)
        {
            if (encodeInterface != -1)
            {
                MixCastAV.ReleaseEncodeInterface(encodeInterface);
                encodeInterface = -1;
            }

            double ElapsedTime        = (DateTime.Now - StartEncodingTime).TotalSeconds;
            double ElapsedEncodedTime = (double)_encodedFrameCount / (double)Framerate;
            double PercentEncoded     = 100.0 * (ElapsedEncodedTime / ElapsedTime);
            int    PercentEncodedInt  = (int)PercentEncoded;

            Debug.Log("% of possible frames encoded: " + PercentEncodedInt + "%");

            StartEncodingTime  = DateTime.MinValue;
            _encodedFrameCount = 0;
            ReleaseRenderTexture(encoderLock);

            if (encoderRunning)
            {
                EventCenter.HandleEvent(Category, EventCenter.Result.Stopped);
                EventCenter.HandleEvent(Category, EventCenter.Result.Success,
                                        string.Format("{0} {1}", Text.Localization.Get("Info_Encoding_Stopped"), _uriOutput.Replace('/', '\\')),
                                        false);
            }

            encoderRunning = false;
        }
Example #3
        public AudioDevice(string deviceName, int numChannels, int samplingRate, int bitsPerSample)
        {
            if (cfgAudDec != IntPtr.Zero || audDec != IntPtr.Zero || aTxfDec != IntPtr.Zero)
            {
                return;
            }

            //allocate the data and config context
            cfgAudDec = MixCastAV.getAudioDecodeCfg(deviceName, numChannels, samplingRate, bitsPerSample,
                                                    AUDIO_NUMCHANNELS, AUDIO_SAMPLINGRATE, AUDIO_BITSPERSAMPLE);

            //setup the audio decode codec
            audDec = MixCastAV.getVideoDecodeContext(cfgAudDec);

            //setup the audio transformer for decode
            aTxfDec = MixCastAV.getVideoTransformContext(cfgAudDec);

            if (cfgAudDec != IntPtr.Zero && audDec != IntPtr.Zero && aTxfDec != IntPtr.Zero)
            {
                Debug.Log("Started Audio Device");
            }
            else
            {
                return;
            }

            //udatasize = LibAvStuff.getCfgOutputDataSize(cfgAudDec);

            //create the audio asynchronous interface
            //audAsyncDec = LibAvStuff.createAudioDecodeAsync(audDec, cfgAudDec, aTxfDec);
        }
Example #4
        public double GetLevelsDesktop()
        {
            Double level = 0d;

            if (audAsyncDec != IntPtr.Zero)
            {
                MixCastAV.getLevelsAudioDecodeAsyncDesktop(audAsyncDec, ref level);
            }
            return(level);
        }
Example #5
        public float GetDesktopMeterLevel()
        {
            double level = 0.0;

            if (audAsyncDec != IntPtr.Zero)
            {
                MixCastAV.getLevelsAudioDecodeAsyncDesktop(audAsyncDec, ref level);
            }
            return((float)level);
        }
Example #6
        protected virtual void OnEnable()
        {
            if (audioCallContext == null)
            {
                audioCallContext = GetComponentInParent <AudioCallbackContext>();
            }

            //set the callback function for libavstuff
            MixCastAV.SetAudioCallBack(myAudioCallBack);
        }
Example #7
        private void Update()
        {
            createCams.AddRange(MixCast.Settings.cameras);
            destroyCams.AddRange(CameraInstances);
            for (int i = 0; i < CameraInstances.Count; i++)
            {
                MixCastData.CameraCalibrationData camData = CameraInstances[i].Data;
                for (int j = createCams.Count - 1; j >= 0; j--)
                {
                    if (createCams[j] == camData)
                    {
                        createCams.RemoveAt(j);
                    }
                }
            }
            for (int i = 0; i < MixCast.Settings.cameras.Count; i++)
            {
                for (int j = destroyCams.Count - 1; j >= 0; j--)
                {
                    if (destroyCams[j].Data == MixCast.Settings.cameras[i])
                    {
                        destroyCams.RemoveAt(j);
                    }
                }
            }

            for (int i = 0; i < destroyCams.Count; i++)
            {
                CameraInstances.Remove(destroyCams[i]);
                Destroy(destroyCams[i].gameObject);
            }

            for (int i = 0; i < createCams.Count; i++)
            {
                bool wasPrefabActive = cameraPrefab.gameObject.activeSelf;
                cameraPrefab.gameObject.SetActive(false);

                CameraConfigContext instance = Instantiate(cameraPrefab, transform, false);

                instance.Data = createCams[i];

                CameraInstances.Add(instance);

                cameraPrefab.gameObject.SetActive(wasPrefabActive);

                instance.gameObject.SetActive(MixCast.Active);
            }

            destroyCams.Clear();
            createCams.Clear();

            // Process LibAVStuff logs
            MixCastAV.LogOutput();
        }
Example #8
 public void Play()
 {
     if (MixCastAV.startAudioDecodeAsync(audAsyncDec) < 0)
     {
         Debug.LogError("Failed Starting Audio Device Async Interface." + audAsyncDec);
         startRun = false;
     }
     else
     {
         startRun = true;
     }
 }
Example #9
        protected int Setup(string audioAltName, IntPtr vidEnc, IntPtr audEnc, IntPtr cfgAud)
        {
            if (context == null || context.Data == null || vidEnc == IntPtr.Zero || cfgAud == IntPtr.Zero || audEnc == IntPtr.Zero)
            {
                Debug.LogError("The encode or data objects are not yet setup for creating audio encoder");
                return(-1);
            }
            _audioAltName = audioAltName;

            //this uses a different number each time, and the isBufferFreshAudioEncodeAsync() API call will
            //clean the number and its access for if it is not used for two callbacks or more
            encodeInterfaceNumber = MixCastAV.AudioEncodeInterfaceCounter++;

            //TODO:
            //for persistent encode async run, and the aud async becomes dereferenced for whatever reason
            //if (_vidEncCopy != IntPtr.Zero && _audEncCopy != IntPtr.Zero && _cfgAudCopy != IntPtr.Zero)
            //{}

            if (_audAsyncEncode != IntPtr.Zero)
            {
                MixCastAV.stopAudioEncodeAsync(_audAsyncEncode);
                MixCastAV.freeAudioEncodeAsync(_audAsyncEncode);
                _audAsyncEncode = IntPtr.Zero;
            }

            //assumes a universal AudioAsyncFeed
            _audAsyncDecodeCopy = AudioAsyncFeed.Instance(context.Data.id).audAsyncDec;
            _vidEncCopy         = vidEnc;
            _audEncCopy         = audEnc;
            _cfgAudCopy         = cfgAud;

            _audAsyncEncode = MixCastAV.createAudioEncodeAsync(_audAsyncDecodeCopy, _vidEncCopy, _cfgAudCopy, _audEncCopy, MixCastAV.chunksPerSec);

            if (_audAsyncEncode == IntPtr.Zero)
            {
                Debug.LogError("Could not setup audio encode async interface");
                return(-1);
            }

            if (MixCastAV.startAudioEncodeAsync(_audAsyncEncode) < 0)
            {
                MixCastAV.freeAudioEncodeAsync(_audAsyncEncode);
                _audAsyncEncode = IntPtr.Zero;
                Debug.LogError("Could not start audio encode async interface");
                return(-1);
            }
#if _DEBUG
            Debug.LogWarning(string.Format("Encode started for decoder: {0} with encoder: {1}", (int)_audAsyncDecodeCopy, (int)_audAsyncEncode));
#endif
            return(0);
        }
Example #10
 public void Free()
 {
     if (_audAsyncEncode != IntPtr.Zero)
     {
         if (MixCastAV.checkStartedAudioEncodeAsync(_audAsyncEncode) == 0)
         {
             if (MixCastAV.freeAudioEncodeAsync(_audAsyncEncode) < 0)
             {
                 Debug.LogError("Error freeing audio encode interface.\n");
             }
         }
         _audAsyncEncode = IntPtr.Zero;
     }
 }
Example #11
        protected void SendMixCastOutput(MixCastCamera cam, int duplicateFrameCount)
        {
            if (!encoderRunning)
            {
                return;
            }

            ResizeTexture(_width, _height);
            Graphics.Blit(cam.Output, _cameraOutputTexture);
            MixCastAV.encoderSetDuplicateFrameCount(_vCfgEnc, duplicateFrameCount);
            GL.IssuePluginEvent(MixCastAV.GetEncodeInterfaceRenderCallback(), encodeInterface);

            _encodedFrameCount += duplicateFrameCount;
        }
Example #12
        private void _killDecoder()
        {
            if (audAsyncDec != IntPtr.Zero)
            {
                bool resFreeDec = MixCastAV.freestopAudioDecodeAsync(audAsyncDec) == 0;

                if (resFreeDec == false)
                {
                    Debug.LogError("Error Freeing Audio Device Async Interface. " + audAsyncDec);
                }

                audAsyncDec = IntPtr.Zero;
            }
        }
Example #13
 //do not call this more than once when it is running
 protected void Play()
 {
     if (isRunning == false)
     {
         if (MixCastAV.startAudioDecodeAsync(audAsyncDec) < 0)
         {
             Debug.LogError("Failed Starting Audio Device Async Interface." + audAsyncDec);
             isRunning = false;
         }
         else
         {
             //Debug.LogWarning( "Playing audio" );
             isRunning = true;
             //Debug.LogWarning( "audio decode started for: " + (int)audAsyncDec );
         }
     }
 }
Example #14
        /// <summary>
        ///     DirectShowInputFeedStream(string deviceName, int w, int h, int fps, string pixFmtStr, int rtBufSize, int forceRGB = MixCastAV.FORCE_RGBA)
        ///     Builds the decoder with the info supplied in the parameters. This is non-threaded.
        /// </summary>
        /// <param name="deviceName">The device name, which supports an alternative hardware name</param>
        /// <param name="w">The requested width of the device and also the width of the texture</param>
        /// <param name="h">The requested height of the device and also the height of the texture</param>
        /// <param name="fps">The requested framerate of the device</param>
        /// <param name="pixFmtStr">The requested pixel format of the supported device</param>
        /// <param name="rtBufSize">The requested buffer size for the input stream</param>
        /// <param name="forceRGB">Toggle forced RGBA output for the texture (defaults to MixCastAV.FORCE_RGBA)</param>
        public DirectShowInputFeedStream(string deviceName, int w, int h, int fps, string pixFmtStr, int rtBufSize, int forceRGB = MixCastAV.FORCE_RGBA)
        {
            width  = w;
            height = h;

            if (cfgVidDec != IntPtr.Zero || vidDec != IntPtr.Zero || vidTxfDec != IntPtr.Zero)
            {
                return;
            }

            cfgVidDec = MixCastAV.getVideoDecodeCfg(deviceName, w, h, fps, pixFmtStr, w, h, fps, MixCastAV.texturePixelFormat, 1);

            if (vidDec == IntPtr.Zero)
            {
                vidDec = MixCastAV.getVideoDecodeContext(cfgVidDec);
            }

            if (vidDec == IntPtr.Zero)
            {
                Texture = null;
                return;
            }

            vidTxfDec = MixCastAV.getVideoTransformContext(cfgVidDec);

            if (vidTxfDec != IntPtr.Zero && vidDec != IntPtr.Zero && cfgVidDec != IntPtr.Zero)
            {
                Debug.Log("Started Device Feed");
            }
            else
            {
                Texture = null;
                return;
            }

            udatasize = MixCastAV.getCfgOutputDataSize(cfgVidDec);

            //initialize the texture format
            Texture          = new Texture2D(w, h, TextureFormat.RGBA32, false, true);
            Texture.wrapMode = TextureWrapMode.Clamp;

            // Initialize decoder camera thread
            decoderInterface = MixCastAV.CreateDecodeInterface(vidDec, cfgVidDec, vidTxfDec);
            MixCastAV.SetDecodeInterfaceTexture(decoderInterface, Texture.GetNativeTexturePtr());
        }
Example #15
        protected void ResizeTexture(int width, int height)
        {
            if (_cameraOutputTexture != null &&
                _cameraOutputTexture.width == width &&
                _cameraOutputTexture.height == height)
            {
                return;
            }

            if (_cameraOutputTexture != null)
            {
                DestroyImmediate(_cameraOutputTexture);
            }

            _cameraOutputTexture = new RenderTexture(width, height, 0);
            _cameraOutputTexture.Create();

            MixCastAV.SetEncodeInterfaceTexture(encodeInterface, _cameraOutputTexture.GetNativeTexturePtr());
        }
Example #16
        public void SetAudioConfiguration(MixCastAV.AUDIOCONFIG cfgType)
        {
            if (audAsyncDec != IntPtr.Zero)
            {
                //useful for debugging
                //if (cfgType == MixCastAV.AUDIOCONFIG.MICROPHONE_AND_DESKTOP)
                //	Debug.Log("The audio mode is set to : MICROPHONE_AND_DESKTOP");
                //if (cfgType == MixCastAV.AUDIOCONFIG.DESKTOP_ONLY)
                //	Debug.Log("The audio mode is set to : DESKTOP_ONLY");
                //if (cfgType == MixCastAV.AUDIOCONFIG.MICROPHONE_ONLY)
                //	Debug.Log("The audio mode is set to : MICROPHONE_ONLY");
                //if (cfgType == MixCastAV.AUDIOCONFIG.NO_AUDIO)
                //	Debug.Log("The audio mode is set to : NO_AUDIO");

                MixCastAV.setCfgAudioDecodeAsync(audAsyncDec, cfgType);
                _adeviceConfiguration = cfgType;                 // save the configuration change
            }
            //Debug.LogWarning( "Setting audio config to " + cfgType.ToString() );
        }
Example #17
        public bool BufferFresh(int interfaceNumber)
        {
            if (MixCastAV.checkStartedAudioDecodeAsync(audAsyncDec) == 0)
            {
                int res = MixCastAV.isBufferFreshAudioDecodeAsync(audAsyncDec, interfaceNumber);
                if (res == -2)
                {
                    Debug.LogError("The async audio decode interface buffer is not ready because it has not yet started.");
                }
#if _DEBUG
                Debug.LogWarning(string.Format("Buffer Freshness: {0} - {1}", (res >= 0 ? "Yes" : "No"), interfaceNumber));
#endif
                return(res == 0);
            }

            return(false);
        }
Example #18
        void ReleaseRenderTexture(object encoderLock)
        {
            if (_cameraOutputTexture != null)
            {
                _cameraOutputTexture.Release();
            }
            _cameraOutputTexture = null;

            if (_vidEnc != IntPtr.Zero && _vCfgEnc != IntPtr.Zero && _vTxfEnc != IntPtr.Zero &&
                _aCfgEnc != IntPtr.Zero && _audEnc != IntPtr.Zero)
            {
                IntPtr _vidEncCopy  = _vidEnc;
                IntPtr _vCfgEncCopy = _vCfgEnc;
                IntPtr _vTxfEncCopy = _vTxfEnc;
                IntPtr _aCfgEncCopy = _aCfgEnc;
                IntPtr _audEncCopy  = _audEnc;


                int msTimeout = 1000 / MixCastAV.chunksPerSec;

                new Thread(() =>
                {
                    lock (encoderLock)
                    {
                        Thread.Sleep(msTimeout);
                        //Debug.Log("Asynchronously cleaning up encoder: " + _encCopy.ToString());
                        MixCastAV.writeTrailerCloseStreams(_vidEncCopy);
                        MixCastAV.freeVideoCfg(_vCfgEncCopy);
                        MixCastAV.freeAudioEncodeContext(_audEncCopy);
                        MixCastAV.freeVideoTransform(_vTxfEncCopy);
                        MixCastAV.freeAudioCfg(_aCfgEncCopy);
                    }
                }).Start();
            }

            _vidEnc  = IntPtr.Zero;
            _vCfgEnc = IntPtr.Zero;
            _vTxfEnc = IntPtr.Zero;
            _aCfgEnc = IntPtr.Zero;
            _audEnc  = IntPtr.Zero;
        }
Example #19
        private void _killDecoder()
        {
            bool resFreeDec = false;
            bool resFreeCfg = false;
            bool resFreeTxf = false;

            MixCastAV.freestopAudioDecodeAsync(audAsyncDec);
            System.Threading.Thread.Sleep(2);             //untested amount of sleep time in ms needed to avoid race condition
            audAsyncDec = IntPtr.Zero;

            //free the decoder
            if (audDec != IntPtr.Zero)
            {
                resFreeDec = MixCastAV.freeAudioDecodeContext(audDec) == 0;
            }
            audDec = IntPtr.Zero;

            //free the data config
            if (cfgAudDec != IntPtr.Zero)
            {
                resFreeCfg = MixCastAV.freeAudioCfg(cfgAudDec) == 0;
            }
            cfgAudDec = IntPtr.Zero;

            //free the transformer
            if (aTxfDec != IntPtr.Zero)
            {
                resFreeTxf = MixCastAV.freeAudioTransform(aTxfDec) == 0;
            }
            aTxfDec = IntPtr.Zero;


            if (resFreeDec == false || resFreeCfg == false || resFreeTxf == false)
            {
                Debug.LogError("Error Freeing Audio Device. " + audDec);
            }
        }
Example #20
        private void _killDecoder()
        {
            bool resFreeDec = false;
            bool resFreeCfg = false;
            bool resFreeTxf = false;

            MixCastAV.ReleaseDecodeInterface(decoderInterface);
            System.Threading.Thread.Sleep(2);             //untested amount of sleep time in ms needed to avoid race condition


            //free the decoder
            if (vidDec != IntPtr.Zero)
            {
                resFreeDec = MixCastAV.freeDecodeContext(vidDec) == 0;
            }
            vidDec = IntPtr.Zero;

            //free the data config
            if (cfgVidDec != IntPtr.Zero)
            {
                resFreeCfg = MixCastAV.freeVideoCfg(cfgVidDec) == 0;
            }
            cfgVidDec = IntPtr.Zero;

            //free the transformer
            if (vidTxfDec != IntPtr.Zero)
            {
                resFreeTxf = MixCastAV.freeVideoTransform(vidTxfDec) == 0;
            }
            vidTxfDec = IntPtr.Zero;


            if (resFreeDec == false || resFreeCfg == false || resFreeTxf == false)
            {
                Debug.LogError("Error Freeing Device Feed. " + vidDec);
            }
        }
Example #21
 public void Play()
 {
     startRun = true;
     MixCastAV.StartDecodeInterface(decoderInterface);
 }
Example #22
 public void RenderFrame()
 {
     GL.IssuePluginEvent(MixCastAV.GetDecodeInterfaceRenderCallback(), decoderInterface);
 }
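Example #14's constructor together with the Play() and RenderFrame() methods above suggests a simple per-frame driver. The sketch below is only illustrative: the MonoBehaviour wrapper, the device name, the resolution and pixel-format values, and the public visibility of Texture, Play() and RenderFrame() are assumptions rather than part of the examples.

 // Hypothetical driver; only DirectShowInputFeedStream, Texture, Play() and
 // RenderFrame() come from the examples above - the rest is assumed.
 using UnityEngine;

 public class WebcamFeedDriver : MonoBehaviour
 {
     private DirectShowInputFeedStream feed;

     void Start()
     {
         // Placeholder device name and 1280x720 @ 30 fps in "yuyv422".
         feed = new DirectShowInputFeedStream("Logitech Webcam", 1280, 720, 30, "yuyv422", 0);

         if (feed.Texture != null)
         {
             feed.Play();            // starts the native decode interface
         }
     }

     void Update()
     {
         if (feed != null && feed.Texture != null)
         {
             feed.RenderFrame();     // issues the plugin render event to update the texture
         }
     }
 }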
Example #23
        protected int EncodeWrite()
        {
            if (_audAsyncEncode == IntPtr.Zero || context == null || context.Data == null || _audAsyncDecodeCopy == IntPtr.Zero)
            {
                if (isShuttingDown)
                {
                    return(-1);
                }
                Debug.LogError("Audio async encode interface is not yet initialized.");
                return(-1);
            }
            var  feed    = AudioAsyncFeed.Instance(context.Data.id);
            bool isFresh = false;

            //push the audio samples to the output muxer if ready
            if (feed != null &&
                MixCastAV.checkStartedAudioEncodeAsync(_audAsyncEncode) == 0 &&
                feed.isRunning == true && _audAsyncDecodeCopy != IntPtr.Zero)
            {
                //check if the audio stream started, if it hasn't then, return early
                if (MixCastAV.checkStartedAudioDecodeAsync(_audAsyncDecodeCopy) != 0)
                {
                    return(-1);
                }

                isFresh = feed.BufferFresh(encodeInterfaceNumber);
                if (isFresh == true)
                {
                    if (MixCastAV.updateBufferAudioEncodeAsync(_audAsyncEncode) < 0)
                    {
                        Debug.LogError("Error updating the audio async encode interface in pulling new data.");
                    }
                }
            }
            else
            {
                if (isShuttingDown)
                {
                    return(-1);
                }

                if (isFresh == false)
                {
                    Debug.LogError("Error, the buffer was not fresh from the decoder audio async feed");
                }
                else if (MixCastAV.checkStartedAudioEncodeAsync(_audAsyncEncode) != 0)
                {
                    Debug.LogError("Error, the encoder was not yet started or had problems starting");
                }
                //the buffer is not fresh from the audio decode async interface, may help to catch some weird bugs
                else if (IsOverAccessBuffer == true)
                {
                    Debug.Log("Checked buffer, but the buffer from the decode async interface was not yet ready to encode. " + encodeInterfaceNumber);
                }

                IsOverAccessBuffer = true;
                return(-1);                //since we are using a callback to write when ready, this is abnormal, if it happens often
            }

            IsOverAccessBuffer = false;
            return(0);
        }
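EncodeWrite() sits between Setup() from Example #9 and Free() from Example #10. A rough lifecycle sketch follows; the class name, the assumed base type that owns the three methods, and the moment EncodeWrite() is invoked are all illustrative assumptions, not part of the SDK.

 // Hypothetical lifecycle sketch; Setup(), EncodeWrite() and Free() are the
 // methods from Examples #9, #23 and #10 - the surrounding class is assumed.
 using System;

 public class AudioEncodeLifecycleSketch : AudioEncoderBase // assumed base exposing the protected methods
 {
     // 1. Create and start the async audio encode interface (0 means success).
     public bool Begin(string audioAltName, IntPtr vidEnc, IntPtr audEnc, IntPtr cfgAud)
     {
         return Setup(audioAltName, vidEnc, audEnc, cfgAud) == 0;
     }

     // 2. Call whenever the decode side has a fresh buffer; pushes samples to the muxer.
     public void OnChunkReady()
     {
         EncodeWrite();
     }

     // 3. Stop and release the async encode interface when recording ends.
     public void End()
     {
         Free();
     }
 }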
Example #24
        protected bool BuildEncoder(string outputPath)
        {
            if (_vCfgEnc != IntPtr.Zero || _vidEnc != IntPtr.Zero || _vTxfEnc != IntPtr.Zero ||
                _audEnc != IntPtr.Zero || _aCfgEnc != IntPtr.Zero)
            {
                Debug.LogError("Could not setup the encoder, previous session is still running");
                return(false);
            }

            encodeInterface = -1;

            //build our encoder here
            _vCfgEnc = MixCastAV.getVideoEncodeCfg(outputPath, _width, _height,
                                                   Framerate, SRC_PIX_FMT, _width, _height, Framerate, DST_PIX_FMT,
                                                   GopSize, BitRate, CODEC_TYPE, CODEC_NAME, FLIP_VERTICAL);

            //Debug.LogWarningFormat( "vCfgEnc: w({0}), h({1}), src_pix({2}), dst_pix({3}), GopSize({4}), bitrate({5}), codec_type({6}), codec_name({7})",
            //    _width, _height, SRC_PIX_FMT, DST_PIX_FMT, GopSize, BitRate, CODEC_TYPE, CODEC_NAME );

            //_aCfgEnc = LibAvStuff.getAudioEncodeCfg(new StringBuilder(dummyName), DEFCHANNELS, DEFSAMPLERATE, DEFBITDEPTH,
            //    DEFCHANNELS, DEFSAMPLERATE, DEFBITDEPTH, AudioBitrate, new StringBuilder(MP2_CODECNAME), new StringBuilder(MP2_CODECNAME));

            //for AAC, we should be using 32 bit depth
            _aCfgEnc = MixCastAV.getAudioEncodeCfg(dummyName, DEFCHANNELS, DEFSAMPLERATE, DEFBITDEPTH,
                                                   DEFCHANNELS, DEFSAMPLERATE, AAC_BITDEPTH, AudioBitrate, AAC_CODECNAME, AAC_CODECNAME);

            int ret = MixCastAV.getAudioAndVideoEncodeContextMux(ref _audEnc, ref _vidEnc, _aCfgEnc, _vCfgEnc);

            if (_vidEnc == IntPtr.Zero || _audEnc == IntPtr.Zero || ret < 0)
            {
                Debug.LogError("Could not setup the encoder, please check configuration");
                EventCenter.HandleEvent(Category, EventCenter.Result.Error, "Warning_Video_Encoder_Error", true);
                MixCastAV.freeVideoCfg(_vCfgEnc);
                MixCastAV.freeAudioCfg(_aCfgEnc);
                _vCfgEnc = IntPtr.Zero;
                _aCfgEnc = IntPtr.Zero;
                return(false);
            }

            _vTxfEnc = MixCastAV.getVideoTransformContext(_vCfgEnc);
            if (_vTxfEnc == IntPtr.Zero)
            {
                Debug.LogError("Could not setup the video transformer for encoding, please check configuration");
                EventCenter.HandleEvent(Category, EventCenter.Result.Error, "Warning_Video_Encoder_Error", true);
                MixCastAV.freeVideoCfg(_vCfgEnc);
                _vCfgEnc = IntPtr.Zero;
                _vidEnc  = IntPtr.Zero;
                return(false);
            }

            if (_bitrateKbps <= 0)
            {
                _bitrateKbps = (ulong)(_width * _height / BITS_IN_KILOBIT);
            }

            if (_vidEnc != IntPtr.Zero && _vCfgEnc != IntPtr.Zero && _vTxfEnc != IntPtr.Zero)
            {
                encodeInterface = MixCastAV.CreateEncodeInterface(_vidEnc, _vCfgEnc, _vTxfEnc);
            }

            return(encodeInterface != -1);
        }
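Together with SendMixCastOutput() from Example #11 and StopEncoderAsync() from Example #2, BuildEncoder() implies a start / per-frame / stop ordering. The sketch below is an assumption about how a recorder built on these methods might call them; the class and base-type names are placeholders.

 // Hypothetical recorder sketch; BuildEncoder(), SendMixCastOutput() and
 // StopEncoderAsync() are the methods from Examples #24, #11 and #2.
 public class RecorderSketch : EncoderOutputBase      // assumed base exposing the protected methods
 {
     private readonly object encoderLock = new object();

     public bool StartRecording(string outputPath)
     {
         // Builds the muxed audio/video encode contexts and the encode interface.
         return BuildEncoder(outputPath);
     }

     public void PushFrame(MixCastCamera cam)
     {
         // Blits the camera output into the encode texture and queues one frame.
         SendMixCastOutput(cam, 1);
     }

     public void StopRecording()
     {
         // Releases the encode interface, logs statistics and frees the
         // contexts on a background thread via ReleaseRenderTexture().
         StopEncoderAsync(encoderLock);
     }
 }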
Example #25
        //multi tool function
        public RETURNCHANGETYPE SetPlay(string altName, int numChannels, int samplingRate, int bitsPerSample,
                                        MixCastAV.AUDIOCONFIG audioConfig, float micVolume, float desktopVolume, int delayMs)
        {
            if (_suppressingPlayback)
            {
                return(RETURNCHANGETYPE.NothingNewDoNothing);
            }
            //Debug.Log( "SetPlay()" );
            if (delayMs > 1000)
            {
                Debug.LogWarning("Delay is too high for the audio, " + delayMs + "ms, setting it to 1000ms.");
                delayMs = 1000;
            }

            //create the audio asynchronous interface
            string DeviceNameSwitch    = altName;
            int    nChannelsSwitch     = numChannels;
            int    samplingRateSwitch  = samplingRate;
            int    bitsPerSampleSwitch = bitsPerSample;

            MixCastAV.AUDIOCONFIG configSwitch = audioConfig;

            //when the string is null, we want to use some defaults for a null audio track still
            if (string.IsNullOrEmpty(altName) == true || altName.Contains(AudioDeviceManager.ALTNAMEFORNULL))
            {
                //dummy info for null track when no data found
                DeviceNameSwitch    = AudioDeviceManager.ALTNAMEFORNULL;
                nChannelsSwitch     = MixCastAV.DEFAUDIO_CHANNELS;
                samplingRateSwitch  = MixCastAV.DEFAUDIO_SAMPLING;
                bitsPerSampleSwitch = MixCastAV.DEFAUDIO_BITDEPTH;
            }


            //if it is exactly the same as last configuration
            if (audAsyncDec != IntPtr.Zero)
            {
                if (_adeviceAltName == altName &&
                    _adeviceBitsPerSample == bitsPerSample &&
                    _adeviceChannels == numChannels &&
                    _adeviceSamplingRate == samplingRate &&
                    _adeviceDelayMs == delayMs)
                {
                    if (_adeviceConfiguration == audioConfig)
                    {
                        //Debug.LogWarning( "No audio change for " + altName );
                        return(RETURNCHANGETYPE.NothingNewDoNothing); //nothing to do since it is the same as last time
                    }
                    else
                    {
                        // only audioConfig changed
                        SetAudioConfiguration(audioConfig);
                        //Debug.LogWarning( "Audio Config: " + audioConfig.ToString() );
                        return(RETURNCHANGETYPE.ConfigurationChangeOnly);
                    }
                }
                else
                {
                    Stop();
                }
            }

            //Debug.LogError("devicename: " + deviceName + ", nCh: " + numChannels + ", sampling: " + samplingRate + ", bitsPer: " + bitsPerSample + ", cfg: " + audioConfig);
            audAsyncDec = MixCastAV.createAudioDecodeAsync(DeviceNameSwitch, nChannelsSwitch,
                                                           samplingRateSwitch, bitsPerSampleSwitch, delayMs, MixCastAV.AUDIOCONFIG.MICROPHONE_AND_DESKTOP, MixCastAV.chunksPerSec);

            //Debug.Log("delay is set to : " + delayMs);

            if (audAsyncDec == IntPtr.Zero)
            {
                //Debug.LogError("Error creating Audio Device Async Interface." + audAsyncDec);
                Debug.LogWarning("Error creating decoder");
                return(RETURNCHANGETYPE.ErrorCreating);
            }
            else             //audAsyncDec is already ready
            {
                //successfully created, so save the variables
                _adeviceAltName       = DeviceNameSwitch;
                _adeviceChannels      = nChannelsSwitch;
                _adeviceSamplingRate  = samplingRateSwitch;
                _adeviceBitsPerSample = bitsPerSampleSwitch;
                _adeviceConfiguration = configSwitch;
                _adeviceDelayMs       = delayMs;

                MixCastAV.setMicVolume(audAsyncDec, micVolume);
                MixCastAV.setDesktopVolume(audAsyncDec, desktopVolume);
                Play();
                SetAudioConfiguration(audioConfig);
                //set intended configuration
                //if (LibAvStuff.checkStartedAudioDecodeAsync(audAsyncDec) == 0)

                // deviceName = _adeviceAltName; // commenting out so no import warning in SDK

                return(RETURNCHANGETYPE.MadeNewDevice);
            }
        }
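SetPlay() is the entry point that creates (or reconfigures) the async audio decoder; the volume and metering methods from Examples #1 and #5 operate on the same interface. The component below is only a usage sketch: the AudioAsyncFeed field type, the null device name, and the chosen parameter values are assumptions.

 // Hypothetical caller; SetPlay(), SetDesktopVolume() and GetDesktopMeterLevel()
 // are the methods from Examples #25, #1 and #5.
 using UnityEngine;

 public class DesktopAudioMeter : MonoBehaviour
 {
     public AudioAsyncFeed feed;   // assumed component exposing the methods above

     void Start()
     {
         // A null device name falls back to the internal null-track defaults.
         feed.SetPlay(null, 0, 0, 0,
                      MixCastAV.AUDIOCONFIG.DESKTOP_ONLY,
                      micVolume: 0f, desktopVolume: 1f, delayMs: 0);
         feed.SetDesktopVolume(1f);
     }

     void Update()
     {
         // Poll the desktop meter level once per frame.
         Debug.Log("Desktop level: " + feed.GetDesktopMeterLevel());
     }
 }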