Example No. 1
        public SpeexPreprocessFilter(int samplesPerFrame, int samplesPerSecond, MediaConfig config, IAudioTwoWayFilter echoCancelFilter, string instanceName = "")
        {
            this.samplesPerSecond = samplesPerSecond;
#if SILVERLIGHT
            st           = this;
            InstanceName = instanceName;
            st.config    = config;

            speex_preprocess_state_init(samplesPerFrame, samplesPerSecond);

            AgcLevel        = 8000;
            DereverbEnabled = false;

            // ks 3/14/11 - VAD is supposedly a "kludge" right now, i.e., it's based on the overall power of the frame,
            // and that's it. See http://lists.xiph.org/pipermail/speex-dev/2006-March/004271.html for the "fix" that eventually
            // made its way into Speex as the "kludge". But turning it on seems to help, especially with AGC.
            VadEnabled = true;

            // ks 3/14/11 - Adjusted these, because the defaults amplify background noise too much.
            // See http://lists.xiph.org/pipermail/speex-dev/2007-May/005696.html
            AgcMaxGain = 15;                                     // Default is 30
            // NoiseSuppression = -30; // Default is -15. Recommended is -30, but that sounds awful in my environment.
            EchoState = echoCancelFilter as SpeexEchoCanceller2; // Will store null if the provided echo canceller isn't a Speex echo canceller, which is what we want.
#endif
        }
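For orientation, a hypothetical instantiation of the filter above could look like the sketch below; the frame size, sample rate, and variable names are illustrative assumptions rather than values taken from the example.
        // Hypothetical usage sketch: 320 samples per frame at 16 kHz is a 20 ms frame.
        var preprocess = new SpeexPreprocessFilter(
            samplesPerFrame: 320,
            samplesPerSecond: 16000,
            config: mediaConfig,              // an existing MediaConfig instance (assumed)
            echoCancelFilter: echoCanceller,  // stored only if it is a SpeexEchoCanceller2
            instanceName: "micPreprocess");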
Example No. 2
 public MediaServerViewModel(MediaConfig mediaConfig, AudioFormat audioFormat, MediaStatistics mediaStatistics, MediaEnvironment mediaEnvironment, IMediaConnection mediaConnection, IVideoQualityController videoQualityController, string roomId)
 {
     MediaController = new MediaController(mediaConfig, audioFormat, mediaStatistics, mediaEnvironment, mediaConnection, videoQualityController);
     RoomId          = roomId;
     MediaServerKpis = new ObservableCollection <MediaServerKpi>();
     MediaController.MediaStats.Counters.CollectionChanged += Counters_CollectionChanged;
 }
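The Counters_CollectionChanged handler registered above is not shown in this example. A minimal sketch using the standard NotifyCollectionChangedEventArgs signature follows; how each counter maps to a MediaServerKpi is an assumption left as a comment.
 private void Counters_CollectionChanged(object sender, System.Collections.Specialized.NotifyCollectionChangedEventArgs e)
 {
     if (e.NewItems == null)
     {
         return;
     }
     foreach (var counter in e.NewItems)
     {
         // Translate each newly added counter into a MediaServerKpi entry and add it
         // to MediaServerKpis here; the exact mapping is not shown in the example.
     }
 }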
Example No. 3
        /// <summary>
        /// Setting up the sender. This is called once the receiver is registered
        /// at the signaling server and is ready to receive an incoming connection.
        /// </summary>
        private void SenderSetup()
        {
            //STEP5: setting up the sender
            Debug.Log("sender setup");

            sender = UnityCallFactory.Instance.Create(netConf);
            MediaConfig mediaConf2 = new MediaConfig();

            //keep video = false for now to keep the example simple & without UI
            mediaConf2.Video = false;
            //send audio. an echo will be heard to confirm it works
            mediaConf2.Audio  = true;
            sender.CallEvent += Sender_CallEvent;

            //Set the configuration. It will trigger a ConfigurationComplete
            // event once completed or ConnectionFailed if something went wrong.
            //
            //Note: platforms handle this very differently e.g.
            // * Windows & Mac will access the media devices and immediately trigger
            //  ConfigurationComplete event
            // * iOS and Android might ask the user first for permission
            //   (or crash the app if it isn't allowed to access! Check your
            //    Unity project setup!)
            // * WebGL behavior is browser specific. Currently, Chrome has a fixed
            //   audio & video device configured and just asks for access while
            //   Firefox lets the user decide which device to use once Configure is
            //   called.
            // See Receiver_CallEvent for next step
            sender.Configure(mediaConf2);
        }
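The Sender_CallEvent handler subscribed above is not part of this example. The sketch below shows one possible shape; the CallEventArgs/CallEventType member names are assumed from the comments (ConfigurationComplete, ConnectionFailed) rather than copied from the example.
        private void Sender_CallEvent(object src, CallEventArgs e)
        {
            // Sketch only: event member names are assumptions based on the comments above.
            if (e.Type == CallEventType.ConfigurationComplete)
            {
                Debug.Log("sender configuration complete");
            }
            else if (e.Type == CallEventType.ConnectionFailed)
            {
                Debug.Log("sender connection failed");
            }
        }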
Example No. 4
        public void InitPjsua(UaConfig uaCfg, LoggingConfig logCfg, MediaConfig mediaCfg)
        {
            var ua_cfg = _mapper.Map(uaCfg, _uaCfg);
            var l_cfg  = _mapper.Map(logCfg, _lCfg);

            ua_cfg.cb.on_reg_state = OnRegState;

            ua_cfg.cb.on_call_state            = OnCallState;
            ua_cfg.cb.on_call_media_state      = OnCallMediaState;
            ua_cfg.cb.on_incoming_call         = OnIncomingCall;
            ua_cfg.cb.on_stream_destroyed      = OnStreamDestroyed;
            ua_cfg.cb.on_dtmf_digit            = OnDtmfDigit;
            ua_cfg.cb.on_call_transfer_request = OnCallTransfer;
            ua_cfg.cb.on_call_transfer_status  = OnCallTransferStatus;
            ua_cfg.cb.on_call_redirected       = OnCallRedirect;

            ua_cfg.cb.on_nat_detect = OnNatDetect;

            ua_cfg.cb.on_buddy_state        = OnBuddyState;
            ua_cfg.cb.on_incoming_subscribe = OnIncomingSubscribe;
            ua_cfg.cb.on_pager        = OnPager;
            ua_cfg.cb.on_pager_status = OnPagerStatus;
            ua_cfg.cb.on_typing       = OnTyping;

            l_cfg.AnonymousMember1 = OnLog;

            //etc;
            _mCfg = new pjsua_media_config();
            PJSUA_DLL.Media.pjsua_media_config_default(_mCfg);
            Helper.GuardError(PJSUA_DLL.Basic.pjsua_init(ua_cfg, l_cfg, _mapper.Map(mediaCfg, _mCfg)));
        }
Example No. 5
 //
 protected virtual void Awake()
 {
     mUi = GetComponent <CallAppUi>();
     Init();
     mMediaConfig      = CreateMediaConfig();
     mMediaConfigInUse = mMediaConfig;
 }
Example No. 6
    public override MediaConfig CreateMediaConfig()
    {
        MediaConfig config = base.CreateMediaConfig();

        switch (role)
        {
        case IOType.Streamer:
            config.Video           = true;          // Send, not receive
            config.VideoDeviceName = "StreamerCam"; // TODO: get camera
            break;

        case IOType.Receiver:
            config.Video           = true; // Receive, not send
            config.VideoDeviceName = "StreamerCam";
            break;

        case IOType.Undefined:
            Debug.LogWarning("Waiting for other side, returning null (MediaConfig)!");
            return(null);
        }

        config.Audio = false;

        return(config);
    }
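Because the Undefined branch above returns null, a caller needs to guard before using the result. A hypothetical sketch (the mCall field and the retry behaviour are assumptions):
    MediaConfig config = CreateMediaConfig();
    if (config == null)
    {
        // Role not decided yet; try again later instead of configuring with null.
        return;
    }
    mCall.Configure(config);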
Example No. 7
 private void Image_Add_Click(object sender, RoutedEventArgs e)
 {
     try
     {
         System.Windows.Forms.FolderBrowserDialog fbr = new System.Windows.Forms.FolderBrowserDialog();
         if (fbr.ShowDialog() == System.Windows.Forms.DialogResult.OK)
         {
             string   path = fbr.SelectedPath;
             FileInfo fi   = new FileInfo(path);
             if (!this.dictImage.ContainsKey(fi.Name))
             {
                 this.dictImage.Add(fi.Name, path);
                 ObservableCollection <ListBoxMediaItem> test = this.listbox2.ItemsSource as ObservableCollection <ListBoxMediaItem>;
                 test.Add(new ListBoxMediaItem {
                     Name = fi.Name, ImagePath = "/styles/skin/item/item_image.png"
                 });
                 //MediaConfig.AddMediaLibrary(path);
                 MediaConfig.AddMyImageLibrary(path);
                 //JAVAUtil.AddSourceToHTTP(path);
                 //new Thread(new ThreadStart(delegate ()
                 //{
                 new function.media.MediaFunction().GetThumbnail(path);
                 util.JAVAUtil.AddAlltoHttp(path);
                 //})).Start();
             }
         }
     }catch (Exception error)
     {
          log.Info(string.Format("Failed to add image: {0}", error.Message));
     }
 }
Example No. 8
        public void Configure(MediaConfig config)
        {
            int minWidth       = -1;
            int minHeight      = -1;
            int maxWidth       = -1;
            int maxHeight      = -1;
            int idealWidth     = -1;
            int idealHeight    = -1;
            int minFrameRate   = -1;
            int maxFrameRate   = -1;
            int idealFrameRate = -1;

            SetOptional(config.MinWidth, ref minWidth);
            SetOptional(config.MinHeight, ref minHeight);
            SetOptional(config.MaxWidth, ref maxWidth);
            SetOptional(config.MaxHeight, ref maxHeight);
            SetOptional(config.IdealWidth, ref idealWidth);
            SetOptional(config.IdealHeight, ref idealHeight);

            SetOptional(config.MinFrameRate, ref minFrameRate);
            SetOptional(config.MaxFrameRate, ref maxFrameRate);
            SetOptional(config.IdealFrameRate, ref idealFrameRate);


            CAPI.Unity_MediaNetwork_Configure(mReference,
                                              config.Audio, config.Video,
                                              minWidth, minHeight,
                                              maxWidth, maxHeight,
                                              idealWidth, idealHeight,
                                              minFrameRate, maxFrameRate, idealFrameRate, config.VideoDeviceName
                                              );
        }
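The SetOptional helper used above is not shown. The sketch below assumes the MediaConfig properties are nullable ints; if they are plain ints using a -1 sentinel instead, the check would compare against -1.
        // Sketch of the assumed helper: copy the value only when it was actually set,
        // otherwise keep the -1 "not specified" sentinel initialised above.
        private static void SetOptional(int? value, ref int target)
        {
            if (value.HasValue)
            {
                target = value.Value;
            }
        }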
Example No. 9
        /// <summary>
        /// This is setting the media used for this call.
        /// </summary>
        public void Configure()
        {
            //STEP2: configure media devices
            MediaConfig mediaConfig = new MediaConfig();

            if (_Sender)
            {
                //sender is sending audio and video
                mediaConfig.Audio = true;
                mediaConfig.Video = true;

                //We ask for 320x240 (should work fine
                //even on the weakest systems)
                //note that not all devices can actually
                //deliver the resolution we ask for
                mediaConfig.IdealWidth  = 320;
                mediaConfig.IdealHeight = 240;
            }
            else
            {
                //set to false to avoid
                //echo & multiple calls trying to access the same camera
                mediaConfig.Audio = false;
                mediaConfig.Video = false;
            }

            mCall.Configure(mediaConfig);
            mState = SimpleCallState.Config;
            Log("Configure media devices");
        }
Example No. 10
 public TestAudioContextFactory(MediaConfig mediaConfig, AudioFormat rawAudioFormat, AudioFormat transmittedAudioFormat, SpeechEnhancementStack enhancementStack, AudioCodecType codecType)
 {
     this.mediaConfig            = mediaConfig;
     this.rawAudioFormat         = rawAudioFormat;
     this.transmittedAudioFormat = transmittedAudioFormat;
     this.enhancementStack       = enhancementStack;
     this.codecType = codecType;
 }
Example No. 11
 public void Configure(MediaConfig config)
 {
     UnityMediaNetwork_Configure(mReference,
                                 config.Audio, config.Video,
                                 config.MinWidth, config.MinHeight,
                                 config.MaxWidth, config.MaxHeight,
                                 config.IdealWidth, config.IdealHeight);
 }
Example No. 12
 public void Configure(IMvxPluginConfiguration configuration)
 {
     if (!(configuration is MediaConfig))
     {
         throw new System.Exception("Configuration is not a valid MediaConfig.");
     }
     _Configuration = (MediaConfig)configuration;
 }
Example No. 13
 public MaskinStartPageController(IMachineRepository machineRepository,
                                  IUserManagementService usersManagementService,
                                  IGroMachineService groMachineService,
                                  MediaConfig mediaConfig) : base(usersManagementService)
 {
     _machineRepository = machineRepository;
     _groMachineService = groMachineService;
 }
Example No. 14
        public CustomMediaManager(BaseFactory factory)
        {
            XmlNode configNode = factory.GetConfigNode("mediaLibrary");

            this.config  = configNode == null ? new MediaConfig() : new MediaConfig(configNode);
            this.creator = this.ResolveCreator(factory, configNode);
            cdnHelper    = ServiceLocator.ServiceProvider.GetService <ICdnHelper>();
        }
Example No. 15
    /// <summary>
    /// Creates the call object and uses the configure method to activate the
    /// video / audio support if the values are set to true. The camera will keep
    /// generating new frames after this call so the user can see himself before
    /// the call is connected.
    /// </summary>
    public virtual void SetupCall()
    {
        Append("Setting up ...");

        //hacks to turn off certain connection types. If both are set to true, only
        //TURN servers are used. This helps simulate a NAT that doesn't support
        //opening ports.
        //hack to turn off direct connections
        //Byn.Net.Native.AWebRtcPeer.sDebugIgnoreTypHost = true;
        //hack to turn off connections via stun servers
        //Byn.Net.Native.WebRtcDataPeer.sDebugIgnoreTypSrflx = true;

        NetworkConfig netConfig = CreateNetworkConfig();


        Debug.Log("Creating call using NetworkConfig:" + netConfig);
        //setup the server
        mCall = UnityCallFactory.Instance.Create(netConfig);
        if (mCall == null)
        {
            Append("Failed to create the call");
            return;
        }
        mCall.LocalFrameEvents = mLocalFrameEvents;
        string[] devices = UnityCallFactory.Instance.GetVideoDevices();
        if (devices == null || devices.Length == 0)
        {
            Debug.Log("no device found or no device information available");
        }
        else
        {
            foreach (string s in devices)
            {
                Debug.Log("device found: " + s);
            }
        }
        Append("Call created!");
        mCall.CallEvent += Call_CallEvent;

        //this happens in awake now to allow an ui or other external app
        //to change media config before calling SetupCall
        //mMediaConfig = CreateMediaConfig();

        //make a deep clone to avoid confusion if settings are changed
        //at runtime.
        mMediaConfigInUse = mMediaConfig.DeepClone();
        Debug.Log("Configure call using MediaConfig: " + mMediaConfigInUse);
        mCall.Configure(mMediaConfigInUse);
        mUi.SetGuiState(false);

        if (mBlockSleep)
        {
            //backup sleep timeout and set it to never sleep
            mSleepTimeoutBackup = Screen.sleepTimeout;
            Screen.sleepTimeout = SleepTimeout.NeverSleep;
        }
    }
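The sleep-timeout backup above implies a matching restore. A sketch is shown below; where it runs (for example when the call ends or in OnDestroy) is an assumption.
        if (mBlockSleep)
        {
            // Restore the sleep timeout that was backed up in SetupCall.
            Screen.sleepTimeout = mSleepTimeoutBackup;
        }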
Example No. 16
        public void Setup()
        {
            Sitecore.Resources.Media.MediaProvider mediaProvider =
                NSubstitute.Substitute.For<Sitecore.Resources.Media.MediaProvider>();
            _switcher = new Sitecore.FakeDb.Resources.Media.MediaProviderSwitcher(mediaProvider);

            var config = new MediaConfig(Sitecore.Configuration.Factory.GetConfigNode("mediaLibrary"));
            MediaManager.Config = config;
        }
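A matching teardown would dispose the switcher to restore the original media provider. The sketch below assumes an NUnit-style fixture and that MediaProviderSwitcher is disposable, as Sitecore switchers generally are.
        [TearDown]
        public void TearDown()
        {
            // Assumption: disposing the switcher restores the previous MediaProvider.
            _switcher.Dispose();
        }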
Example No. 17
    /// <summary>
    /// Creates the call object and uses the configure method to activate the
    /// video / audio support if the values are set to true. The camera will keep
    /// generating new frames after this call so the user can see himself before
    /// the call is connected.
    /// </summary>
    public virtual void SetupCall()
    {
        Append("Setting up ...");

        //hacks to turn off certain connection types. If both are set to true, only
        //TURN servers are used. This helps simulate a NAT that doesn't support
        //opening ports.
        //hack to turn off direct connections
        //Byn.Net.Native.AWebRtcPeer.sDebugIgnoreTypHost = true;
        //hack to turn off connections via stun servers
        //Byn.Net.Native.WebRtcDataPeer.sDebugIgnoreTypSrflx = true;

        NetworkConfig netConfig = CreateNetworkConfig();


        Debug.Log("Creating call using NetworkConfig:" + netConfig);
        //setup the server
        mCall = UnityCallFactory.Instance.Create(netConfig);
        if (mCall == null)
        {
            Append("Failed to create the call");
            return;
        }

        mCall.LocalFrameEvents = mLocalFrameEvents;
        string[] devices = UnityCallFactory.Instance.GetVideoDevices();
        if (devices == null || devices.Length == 0)
        {
            Debug.Log("no device found or no device information available");
        }
        else
        {
            foreach (string s in devices)
            {
                Debug.Log("device found: " + s + " IsFrontFacing: " + UnityCallFactory.Instance.IsFrontFacing(s));
            }
        }
        Append("Call created!");
        mCall.CallEvent += Call_CallEvent;



        //make a deep clone to avoid confusion if settings are changed
        //at runtime.
        mMediaConfigInUse = mMediaConfig.DeepClone();

        //try to pick a good default video device if the user wants to send video but
        //didn't bother to pick a specific device
        if (mMediaConfigInUse.Video && string.IsNullOrEmpty(mMediaConfigInUse.VideoDeviceName))
        {
            mMediaConfigInUse.VideoDeviceName = UnityCallFactory.Instance.GetDefaultVideoDevice();
        }

        Debug.Log("Configure call using MediaConfig: " + mMediaConfigInUse);
        mCall.Configure(mMediaConfigInUse);
        mUi.SetGuiState(false);
    }
Example No. 18
 public TestMultipleDestinationAudioSinkAdapter(
     CaptureSource captureSource,
     SourceMediaController mediaController,
     Dictionary <Guid, DestinationMediaController> mediaControllers,
     MediaConfig mediaConfig)
     : base(captureSource, mediaController, mediaConfig, new TestMediaEnvironment(), AudioFormat.Default)
 {
     this.mediaControllers = mediaControllers;
 }
Example No. 19
 public static IMediaValidator GetValidator(MediaType mediaType, MediaConfig config)
 {
     switch (mediaType)
     {
         case MediaType.Audio: return new AudioValidator(config);
         case MediaType.Video: return new VideoValidator(config);
         case MediaType.Image: return new ImageValidator(config);
         default: return null;
     }
 }
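A hypothetical use of the factory above is sketched below; the enclosing class name (MediaValidatorFactory), the config instance, and the Validate call are illustrative assumptions, since those members are not shown in these examples.
 // Hypothetical usage sketch only.
 IMediaValidator validator = MediaValidatorFactory.GetValidator(MediaType.Video, config);
 if (validator == null)
 {
     // Unknown media type: the factory returns null rather than throwing.
     return;
 }
 bool ok = validator.Validate(uploadedFile); // assumed method and argument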
Example No. 20
        private void SenderSetup()
        {
            //STEP4: receiver is ready -> start the sender
            Debug.Log("sender setup");
            sender = UnityCallFactory.Instance.CreateMediaNetwork(netConf);
            MediaConfig mediaConf2 = new MediaConfig();

            mediaConf2.Video = false;
            mediaConf2.Audio = true;
            sender.Configure(mediaConf2);
        }
Example No. 21
 public MultipleControllerAudioSinkAdapter(MediaConfig mediaConfig, CaptureSource captureSource, int frequency)
 {
     ClientLogger.Debug("MultipleControllerAudioSinkAdapter created");
     this.mediaConfig     = mediaConfig;
     AudioControllers     = new List <IAudioController>();
     AudioContexts        = new List <AudioContext>();
     CaptureSource        = captureSource;
     RawAudioFormat       = new AudioFormat(CaptureSource.AudioCaptureDevice.DesiredFormat.SamplesPerSecond);
     oscillator           = new Oscillator();
     oscillator.Frequency = frequency;
 }
Example No. 22
 public FromFileAudioSinkAdapter(
     CaptureSource captureSource,
     IAudioController audioController,
     MediaConfig mediaConfig,
     IMediaEnvironment mediaEnvironment,
     AudioFormat playedAudioFormat,
     List <byte[]> testFrames)
     : base(captureSource, audioController, mediaConfig, mediaEnvironment, playedAudioFormat)
 {
     _testFrames = testFrames;
 }
Example No. 23
 public MyAccountController(
     ISecurityRepository securityRepository,
     IUserManagementService userManager,
     IOrganizationRepository organizationRepository,
     IFileRepository fileRepository,
     MediaConfig mediaConfig) : base(userManager)
 {
     _securityRepository = securityRepository;
     _organizationRepo   = organizationRepository;
     _mediaConfig        = mediaConfig;
     _fileRepository     = fileRepository;
 }
Example No. 24
 public TimingAudioSinkAdapter(
     AudioContext audioContext,
     CaptureSource captureSource,
     IAudioController audioController,
     MediaConfig mediaConfig,
     IMediaEnvironment mediaEnvironment,
     AudioFormat playedAudioFormat)
     : base(captureSource, audioController, mediaConfig, mediaEnvironment, playedAudioFormat)
 {
     _audioContext = audioContext;
     ClientLogger.Debug(GetType().Name + " created.");
 }
Example No. 25
    /// <summary>
    /// Create the default configuration for this CallApp instance.
    /// This can be overwritten in a subclass, allowing the creation of custom apps that
    /// use a slightly different configuration.
    /// </summary>
    /// <returns></returns>
    public virtual MediaConfig CreateMediaConfig()
    {
        MediaConfig mediaConfig = new MediaConfig();
        //testing echo cancellation (native only)
        bool useEchoCancellation = false;

        if (useEchoCancellation)
        {
#if !UNITY_WEBGL
            var nativeConfig = new Byn.Media.Native.NativeMediaConfig();
            nativeConfig.AudioOptions.echo_cancellation   = true;
            nativeConfig.AudioOptions.extended_filter_aec = true;
            nativeConfig.AudioOptions.delay_agnostic_aec  = true;

            mediaConfig = nativeConfig;
#endif
        }



        //use video and audio by default (the UI toggles are on by default as well; they will change on click)
        mediaConfig.Audio = true;
        mediaConfig.Video = true;
//        mediaConfig.VideoDeviceName = CameraName;
//        if (mediaConfig.VideoDeviceName == "")
//            mediaConfig.VideoDeviceName = null;

        int  n         = -1;
        bool isNumeric = int.TryParse(CameraNameOrIndex, out n);

        if (isNumeric)
        {
            mediaConfig.VideoDeviceName = UnityCallFactory.Instance.GetVideoDevices()[n];
        }
        else
        {
            mediaConfig.VideoDeviceName = CameraNameOrIndex;
        }


        //keep the resolution low.
        //This helps avoid problems with very weak CPUs and very high resolution cameras
        //(apparently a problem with Win10 tablets)
        mediaConfig.MinWidth       = 160;
        mediaConfig.MinHeight      = 120;
        mediaConfig.MaxWidth       = 1920;
        mediaConfig.MaxHeight      = 1080;
        mediaConfig.IdealWidth     = 640;
        mediaConfig.IdealHeight    = 480;
        mediaConfig.IdealFrameRate = 30;
        return(mediaConfig);
    }
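The numeric branch above indexes GetVideoDevices() without a bounds check. A defensive variant (a sketch, not part of the original example) could guard it like this:
        string[] devices = UnityCallFactory.Instance.GetVideoDevices();
        if (isNumeric && devices != null && n >= 0 && n < devices.Length)
        {
            mediaConfig.VideoDeviceName = devices[n];
        }
        else
        {
            // Fall back to treating the input as a device name.
            mediaConfig.VideoDeviceName = CameraNameOrIndex;
        }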
Example No. 26
        private void SetupReceiver()
        {
            //STEP2: Setup the receiver. See UpdateReceiver() for event handling
            Debug.Log("receiver setup");
            MediaConfig mediaConf1 = new MediaConfig();

            //first one only receives
            mediaConf1.Video = false;
            mediaConf1.Audio = false;

            receiver = UnityCallFactory.Instance.CreateMediaNetwork(netConf);
            receiver.Configure(mediaConf1);
        }
Example No. 27
        private void SetupCalls()
        {
            MediaConfig mediaConf1 = new MediaConfig();

            mediaConf1.Video = false;
            mediaConf1.Audio = true;
            for (int i = 0; i < calls.Length; i++)
            {
                Debug.Log(i + " setup");
                calls[i]            = UnityCallFactory.Instance.Create(netConf);
                calls[i].CallEvent += OnCallEvent;
                calls[i].Configure(mediaConf1);
            }
        }
Example No. 28
    IEnumerator Start()
    {
        //(Jonathon)
        //Other things have to initialize as well, so wait a frame

        yield return(null);


        Init();
        mMediaConfig      = CreateMediaConfig();
        mMediaConfigInUse = mMediaConfig;
        SetupCall();
        Join(SecretPassword);
    }
Example No. 29
 public MachineDetailPageController(IMachineRepository machineRepository,
                                    IOrganizationUserRepository orgUserRepo,
                                    IUserManagementService userManager,
                                    IFileRepository fileRepository,
                                    MediaConfig mediaConfig,
                                    IGroContentDataService groContentDataService,
                                    IGroMachineService groMachineService) : base(userManager)
 {
     _orgUserRepo       = orgUserRepo;
     _machineRepository = machineRepository;
     _fileRepository    = fileRepository;
     _mediaConfig       = mediaConfig;
     _groMachineService = groMachineService;
 }
Example No. 30
        public SingleAudioContextFactory(AudioContext audioContext, AudioFormat rawAudioFormat, AudioFormat playedAudioFormat, MediaConfig mediaConfig, IMediaEnvironment mediaEnvironment)
        {
            RawAudioFormat    = rawAudioFormat;
            PlayedAudioFormat = playedAudioFormat;
            MediaConfig       = mediaConfig;
            MediaEnvironment  = mediaEnvironment;

            // Hack!!!! We need to make a copy of the audioContext, but with a few tweaks.
            // When the audio context is first created, we don't know what the rawAudioFormat will be,
            // but it should be accurate by this point, so we need to recreate the AudioContext.
            var resampler = new ResampleFilter(rawAudioFormat, playedAudioFormat);

            resampler.InstanceName    = audioContext.Resampler.InstanceName;
            _audioContext             = new AudioContext(playedAudioFormat, resampler, audioContext.DtxFilter, audioContext.SpeechEnhancementStack, audioContext.Encoder);
            _audioContext.Description = audioContext.Description;
        }
Example No. 31
        /// <summary>
        /// Setting up the sender. This is called once the receiver is registered
        /// at the signaling server and is ready to receive an incoming connection.
        /// </summary>
        private void SenderSetup()
        {
            if (sender != null)
            {
                sender.Dispose();
                sender = null;
            }
            Debug.Log("sender:  setup");

            sender = UnityCallFactory.Instance.Create(BenchmarkConfig.NetConfig);
            MediaConfig mediaConf2 = new MediaConfig();

            mediaConf2.Video  = false;
            mediaConf2.Audio  = false;
            sender.CallEvent += Sender_CallEvent;
            sender.Configure(mediaConf2);
        }
Example No. 32
    /// <summary>
    /// Create the default configuration for this CallApp instance.
    /// This can be overwritten in a subclass, allowing the creation of custom apps that
    /// use a slightly different configuration.
    /// </summary>
    /// <returns></returns>
    public virtual MediaConfig CreateMediaConfig()
    {
        MediaConfig mediaConfig = new MediaConfig();
        //testing echo cancellation (native only)
        bool useEchoCancellation = false;

        if (useEchoCancellation)
        {
#if !UNITY_WEBGL
            var nativeConfig = new Byn.Media.Native.NativeMediaConfig();
            nativeConfig.AudioOptions.echo_cancellation   = true;
            nativeConfig.AudioOptions.extended_filter_aec = true;
            nativeConfig.AudioOptions.delay_agnostic_aec  = true;

            mediaConfig = nativeConfig;
#endif
        }



        //use video and audio by default (the UI toggles are on by default as well; they will change on click)
        mediaConfig.Audio           = true;
        mediaConfig.Video           = true;
        mediaConfig.VideoDeviceName = null;

        //This format is the only reliable format that works on all
        //platforms currently.
        mediaConfig.Format = FramePixelFormat.ABGR;

        mediaConfig.MinWidth  = 160;
        mediaConfig.MinHeight = 120;
        //Larger resolutions are possible in theory but
        //allowing users to set this too high is risky.
        //A lot of devices do have great cameras but not
        //so great CPUs, which might be unable to
        //encode fast enough.
        mediaConfig.MaxWidth  = 1920;
        mediaConfig.MaxHeight = 1080;

        //will be overwritten by UI in normal use
        mediaConfig.IdealWidth     = 160;
        mediaConfig.IdealHeight    = 120;
        mediaConfig.IdealFrameRate = 30;
        return(mediaConfig);
    }
Example No. 33
 internal static global::System.Runtime.InteropServices.HandleRef getCPtr(MediaConfig obj)
 {
     return (obj == null) ? new global::System.Runtime.InteropServices.HandleRef(null, global::System.IntPtr.Zero) : obj.swigCPtr;
 }
Example No. 34
 public VideoValidator(MediaConfig config)
 {
     this.config = config;
 }
Example No. 35
 public ImageValidator(MediaConfig config)
 {
     this.config = config;
 }
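Example No. 19 also constructs an AudioValidator with the same constructor shape as the two validators above. A minimal sketch follows; the remaining IMediaValidator members are not shown anywhere in these examples, so they are omitted here.
 public class AudioValidator
 {
     private readonly MediaConfig config;

     public AudioValidator(MediaConfig config)
     {
         this.config = config;
     }
 }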