// Disconnect from remote resources, dispose of local resources, and exit.
// Safe to call more than once: the quit[0] flag makes the teardown idempotent.
// NOTE(review): locking on quit[0] is fragile — the array slot is reassigned
// inside the lock, and boxed Boolean.True is a shared instance that unrelated
// code could also lock on. A dedicated private lock object would be safer;
// confirm no other code synchronizes on quit[0] before changing.
private void disconnectAndExit()
{
    lock (quit[0])
    {
        // Already shutting down — do nothing on re-entry.
        if (quit[0] == Boolean.True)
        {
            return;
        }
        quit[0] = Boolean.True;
        // Dispose the peer connection before telling the signalling server
        // goodbye, then tear down media resources and the factory last.
        if (pc != null)
        {
            pc.Dispose();
            pc = null;
        }
        if (appRtcClient != null)
        {
            // Notify the remote side we are leaving, then drop the channel.
            appRtcClient.sendMessage("{\"type\": \"bye\"}");
            appRtcClient.disconnect();
            appRtcClient = null;
        }
        if (videoSource != null)
        {
            videoSource.Dispose();
            videoSource = null;
        }
        // The factory must outlive every object it created, so it goes last.
        if (factory != null)
        {
            factory.Dispose();
            factory = null;
        }
        Finish();
    }
}
/// <summary>
/// Builds a peer-connection wrapper for one remote device in a room:
/// validates dependencies, assembles the ICE server configuration from the
/// supplied STUN URLs, wires up the observer events, and creates the
/// underlying native peer connection.
/// </summary>
/// <exception cref="ArgumentNullException">
/// Thrown when any of the required dependencies is null.
/// </exception>
public PeerConnectionAdapter(
    PeerConnectionFactory peerConnectionFactory,
    VideoRouter videoRouter,
    IRoom room,
    IRemoteDevice device,
    IReadOnlyList<string> stunUrls)
{
    // Validate in the same order as before so callers observe the same
    // exception when multiple arguments are null.
    if (peerConnectionFactory is null)
    {
        throw new ArgumentNullException(nameof(peerConnectionFactory));
    }
    if (stunUrls is null)
    {
        throw new ArgumentNullException(nameof(stunUrls));
    }
    Room = room ?? throw new ArgumentNullException(nameof(room));
    Device = device ?? throw new ArgumentNullException(nameof(device));
    _videoRouter = videoRouter ?? throw new ArgumentNullException(nameof(videoRouter));

    // Collect every STUN endpoint into a single ICE server entry.
    var stunServer = new PeerConnectionConfig.IceServerInfo();
    foreach (var stunUrl in stunUrls)
    {
        stunServer.Urls.Add(stunUrl);
    }
    var peerConfig = new PeerConnectionConfig();
    peerConfig.IceServers.Add(stunServer);

    // Subscribe before creating the connection so no early event is missed.
    _peerConnectionObserverImpl = new PeerConnectionObserver();
    _peerConnectionObserverImpl.IceCandidateAdded += IceCandidateAdded;
    _peerConnectionObserverImpl.RenegotiationNeeded += RenegotationNeeded;
    _peerConnectionImpl = peerConnectionFactory.CreatePeerConnection(_peerConnectionObserverImpl, peerConfig);
}
/// <summary>
/// Creates the platform peer-connection factory: captures the Android
/// context, establishes a shared EGL context for video rendering, and
/// exposes the native factory object.
/// </summary>
public PlatformPeerConnectionFactory(Context context)
{
    _context = context;
    var eglContext = EglBaseHelper.Create().EglBaseContext;
    EglBaseContext = eglContext;
    _peerConnectionfactory = CreatePeerConnectionFactory(_context, eglContext);
    NativeObject = _peerConnectionfactory;
}
/// <summary>
/// Creates a video router bound to the WebRTC signalling thread and the
/// peer-connection factory it will route media through.
/// </summary>
/// <exception cref="ArgumentNullException">
/// Thrown when either dependency is null.
/// </exception>
public VideoRouter(
    IThread signallingThread,
    PeerConnectionFactory peerConnectionFactory)
{
    // Guard clauses preserve the original check order.
    if (signallingThread is null)
    {
        throw new ArgumentNullException(nameof(signallingThread));
    }
    if (peerConnectionFactory is null)
    {
        throw new ArgumentNullException(nameof(peerConnectionFactory));
    }
    _signallingThread = signallingThread;
    PeerConnectionFactory = peerConnectionFactory;
}
/// <summary>
/// Builds the WebRTC peer-connection factory using hardware-capable
/// default video codec factories that share this client's EGL context.
/// </summary>
private void CreatePeerConnectionFactoryInternal()
{
    var eglContext = _eglBase.EglBaseContext;
    // true/true enables VP8 and H.264 high-profile in the default encoder
    // factory — assumed from the DefaultVideoEncoderFactory signature; confirm.
    var videoEncoderFactory = new DefaultVideoEncoderFactory(eglContext, true, true);
    var videoDecoderFactory = new DefaultVideoDecoderFactory(eglContext);

    _factory = PeerConnectionFactory.InvokeBuilder()
        .SetVideoEncoderFactory(videoEncoderFactory)
        .SetVideoDecoderFactory(videoDecoderFactory)
        .CreatePeerConnectionFactory();
}
/// <summary>
/// Cycles to the next available camera (wrapping to the first) and opens it.
/// Does nothing when no capture devices are present.
/// </summary>
private void Button_Click(object sender, RoutedEventArgs e)
{
    var devices = PeerConnectionFactory.GetDeviceInfo();
    // Guard: with no cameras attached there is nothing to cycle to; the
    // original would still call OpenCamera() against an empty device list.
    if (devices.Length == 0)
    {
        cameraIndex = 0;
        return;
    }
    // Advance and wrap in one step instead of increment-then-reset.
    cameraIndex = (cameraIndex + 1) % devices.Length;
    OpenCamera();
}
// Unity lifecycle hook: prepares platform audio, acquires the WebRTC thread
// trio, creates the audio device module and peer-connection factory, and
// initializes the playout/recording device helpers.
private void Awake()
{
    // First we run through some Android specifics. Android requires explicit permission requests before the audio context can be created.
    // Beware that failing these will not necessarily report permission errors, but rather device creation errors.
    // Additionally, the library prefers the audio in communication mode, so we switch that too.
    if (Application.platform == RuntimePlatform.Android)
    {
        if (!Permission.HasUserAuthorizedPermission(Permission.Microphone))
        {
            Permission.RequestUserPermission(Permission.Microphone);
        }
        try
        {
            // Reach into the Unity player's current Activity to flip the
            // AudioManager into communication mode via JNI.
            AndroidJavaClass unityPlayer = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
            AndroidJavaObject activity = unityPlayer.GetStatic<AndroidJavaObject>("currentActivity");
            AndroidJavaObject audioManager = activity.Call<AndroidJavaObject>("getSystemService", "audio");
            int mode1 = audioManager.Call<Int32>("getMode");
            audioManager.Call("setMode", 3); // 3 is Communication Mode
            int mode2 = audioManager.Call<Int32>("getMode");
            Debug.Log($"Android Audio Mode changed from {mode1} to {mode2}");
        }
        catch (Exception e)
        {
            // Best-effort: a failure here only means we stay in the default
            // audio mode, so log and continue rather than abort startup.
            Debug.Log(e.ToString());
        }
    }
    threads = WebRtcThreads.Acquire();
    // the default audio device module must be created on the worker thread
    adm = AudioDeviceModuleFactory.CreateDefault(threads.threads[1]); // by convention 1 is the worker (see call below)
    // adm is now initialised
    factory = PeerConnectionFactory.Create(
        threads.threads[0],
        threads.threads[1],
        threads.threads[2], // This is the main signalling thread
        adm,
        AudioEncoderFactory.CreateBuiltin(),
        AudioDecoderFactory.CreateBuiltin(),
        VideoEncoderFactory.CreateBuiltin(),
        VideoDecoderFactory.CreateBuiltin(),
        null, null);
    // Helpers that populate the device dropdowns and react to selection.
    playoutHelper = new AudioDeviceHelper(PlayoutDevices, ChangePlayoutDevice);
    recordingHelper = new AudioDeviceHelper(RecordingDevices, ChangeRecordingDevice);
    pcs = new List<WebRtcPeerConnection>();
}
// Tears down the call in a strict order: AEC dump, event log, audio source,
// capturer, video source, renderers, then the factory itself. The ordering
// is load-bearing — several objects must be released before the factory
// that created them is disposed.
private void CloseInternal()
{
    // Stop the echo-cancellation dump first while the factory still exists.
    if (_factory != null && _parameters.AecDump)
    {
        _factory.StopAecDump();
    }
    _logger.Debug(TAG, "Closing peer connection.");
    if (_rtcEventLog != null)
    {
        // RtcEventLog should stop before the peer connection is disposed.
        _rtcEventLog.Stop();
        _rtcEventLog = null;
    }
    _logger.Debug(TAG, "Closing audio source.");
    if (_audioSource != null)
    {
        _audioSource.Dispose();
        _audioSource = null;
    }
    _logger.Debug(TAG, "Stopping capturer.");
    if (_videoCapturer != null)
    {
        // Stop capture before disposing so no frames arrive on a dead object.
        _videoCapturer.StopCapture();
        _videoCapturer.Dispose();
        _videoCapturerStopped = true;
        _videoCapturer = null;
    }
    _logger.Debug(TAG, "Closing video source.");
    if (_videoSource != null)
    {
        _videoSource.Dispose();
        _videoSource = null;
    }
    // Renderers are owned elsewhere; just drop our references.
    _localRenderer = null;
    _remoteRenderer = null;
    _logger.Debug(TAG, "Closing peer connection factory.");
    // The factory goes last: everything above was created by it.
    if (_factory != null)
    {
        _factory.Dispose();
        _factory = null;
    }
    _logger.Debug(TAG, "Closing peer connection done.");
    _peerConnectionEvents.OnPeerConnectionClosed();
    // Global tracing teardown, then release the executor thread.
    PeerConnectionFactory.StopInternalTracingCapture();
    PeerConnectionFactory.ShutdownInternalTracer();
    _executor.Release();
}
/// <summary>
/// One-time WebRTC initialization: configures the native library (internal
/// tracer, optional field trials) and installs the platform native factory.
/// </summary>
/// <param name="application">Host Android application.</param>
/// <param name="trialsFields">Optional WebRTC field-trials string.</param>
/// <param name="enabledInternalTracer">Whether to enable the internal tracer.</param>
public static void Init(Application application, string trialsFields = null, bool enabledInternalTracer = true)
{
    var optionsBuilder = PeerConnectionFactory.InitializationOptions
        .InvokeBuilder(application)
        .SetEnableInternalTracer(enabledInternalTracer);

    // Field trials are only applied when the caller supplied a non-empty string.
    if (!string.IsNullOrEmpty(trialsFields))
    {
        optionsBuilder.SetFieldTrials(trialsFields);
    }

    PeerConnectionFactory.Initialize(optionsBuilder.CreateInitializationOptions());
    Abstraction.NativeFactory.Init(new NativeFactory(application));
}
/// <summary>
/// Creates the peer-connection client: captures the application context and
/// EGL base, then initializes the native library and builds the factory on
/// the executor thread.
/// </summary>
public PeerConnectionClient(Context context, IEglBase eglBase)
{
    _context = context.ApplicationContext;
    _eglBase = eglBase;
    Executor.Execute(new Runnable(() =>
    {
        // Native initialization must complete before the factory is created,
        // so both run inside the same executor task.
        var initOptions = PeerConnectionFactory.InitializationOptions
            .InvokeBuilder(context)
            .CreateInitializationOptions();
        PeerConnectionFactory.Initialize(initOptions);
        CreatePeerConnectionFactoryInternal();
    }));
}
/// <summary>
/// One-time WebRTC initialization: configures the native library (internal
/// tracer, optional field trials) and registers the platform factory.
/// </summary>
/// <param name="application">Host Android application.</param>
/// <param name="fieldTrials">Optional WebRTC field-trials string.</param>
/// <param name="internalTracerEnabled">Whether to enable the internal tracer.</param>
private static void Init(Application application, string fieldTrials = null, bool internalTracerEnabled = true)
{
    var optionsBuilder = PeerConnectionFactory.InitializationOptions
        .InvokeBuilder(application)
        .SetEnableInternalTracer(internalTracerEnabled);

    // Field trials are only applied when the caller supplied a non-empty string.
    if (!string.IsNullOrEmpty(fieldTrials))
    {
        optionsBuilder.SetFieldTrials(fieldTrials);
    }

    PeerConnectionFactory.Initialize(optionsBuilder.CreateInitializationOptions());
    Core.NativeFactory.Initalize(new PlatformFactory(application));
}
/// <summary>
/// Builds a peer-connection factory with the platform audio device module
/// and hardware-capable default video codec factories sharing the given
/// EGL context.
/// </summary>
private static PeerConnectionFactory CreatePeerConnectionFactory(Context context, IEglBaseContext eglBaseContext)
{
    var audioDeviceModule = CreateAudioDeviceModule(context);
    var videoEncoderFactory = new DefaultVideoEncoderFactory(eglBaseContext, true, true);
    var videoDecoderFactory = new DefaultVideoDecoderFactory(eglBaseContext);

    var factory = PeerConnectionFactory.InvokeBuilder()
        .SetAudioDeviceModule(audioDeviceModule)
        .SetVideoEncoderFactory(videoEncoderFactory)
        .SetVideoDecoderFactory(videoDecoderFactory)
        .CreatePeerConnectionFactory();

    // Drop our reference once the factory holds its own — presumably the
    // module is ref-counted and the factory keeps it alive; confirm.
    audioDeviceModule.Release();
    return factory;
}
/// <summary>
/// Creates a video track capturing from the named video device at a fixed
/// 640x480 @ 30fps.
/// </summary>
/// <param name="source">Device name of the video source.</param>
/// <returns>A video track backed by the device's capture source.</returns>
public static MediaStreamTrack createVideoTrack(string source)
{
    var index = GetVideoIndexByName(source);
    // Removed: an unused GetDeviceCapabilities(index) call and a commented-out
    // capability-matching block — the result was never used; capture settings
    // are intentionally fixed below.
    var video = Facotry.CreateVideoSource(index, 640, 480, 30);
    return createVideoTrack(video);
}
/// <summary>
/// Activity entry point: installs a crash handler, configures the window and
/// video view, initializes WebRTC Android globals, sets up audio routing and
/// SDP constraints, then either connects to the room named in a VIEW intent
/// or shows the room-entry UI.
/// </summary>
protected override void OnCreate(Bundle savedInstanceState)
{
    base.OnCreate(savedInstanceState);
    Java.Lang.Thread.DefaultUncaughtExceptionHandler = new UnhandledExceptionHandler(this);

    Window.AddFlags(WindowManagerFlags.Fullscreen);
    Window.AddFlags(WindowManagerFlags.KeepScreenOn);

    Point displaySize = new Point();
    WindowManager.DefaultDisplay.GetSize(displaySize);
    vsv = new VideoStreamsView(this, displaySize);
    SetContentView(vsv);

    abortUnless(PeerConnectionFactory.InitializeAndroidGlobals(this), "Failed to initializeAndroidGlobals");

    AudioManager audioManager = ((AudioManager)GetSystemService(AudioService));
    // TODO(fischman): figure out how to do this Right(tm) and remove the
    // suppression.
    bool isWiredHeadsetOn = audioManager.WiredHeadsetOn;
    audioManager.Mode = isWiredHeadsetOn ? Mode.InCall : Mode.InCommunication;
    audioManager.SpeakerphoneOn = !isWiredHeadsetOn;

    sdpMediaConstraints = new MediaConstraints();
    sdpMediaConstraints.Mandatory.Add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
    sdpMediaConstraints.Mandatory.Add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));

    Intent intent = Intent;
    // Bug fix: the action string was "Android.intent.action.VIEW" (capital A,
    // a Java->C# conversion artifact), which never matches Android's actual
    // Intent.ACTION_VIEW value, so VIEW intents were silently ignored.
    if ("android.intent.action.VIEW".Equals(intent.Action))
    {
        connectToRoom(intent.Data.ToString());
        return;
    }
    showGetRoomUI();
}
// Callback invoked once the signalling server supplies the ICE server list:
// creates the factory and peer connection, starts periodic stats logging,
// then builds the local media stream (video if constrained, plus audio) and
// attaches it to the connection.
public void onIceServers(IList<PeerConnection.IceServer> iceServers)
{
    factory = new PeerConnectionFactory();
    pc = factory.CreatePeerConnection(iceServers, appRtcClient.pcConstraints(), pcObserver);

    // Uncomment to get ALL WebRTC tracing and SENSITIVE libjingle logging.
    // NOTE: this _must_ happen while |factory| is alive!
    // Logging.enableTracing(
    //     "logcat:",
    //     EnumSet.of(Logging.TraceLevel.TRACE_ALL),
    //     Logging.Severity.LS_SENSITIVE);

    {
        // Schedule a repeating stats dump every 10s on the video view.
        PeerConnection finalPC = pc;
        //JAVA TO C# CONVERTER WARNING: The original Java variable was marked 'final':
        //ORIGINAL LINE: final Runnable repeatedStatsLogger = new Runnable()
        IRunnable repeatedStatsLogger = new RunnableAnonymousInnerClassHelper(this, finalPC);
        vsv.PostDelayed(repeatedStatsLogger, 10000);
    }

    {
        logAndToast("Creating local video source...");
        MediaStream lMS = factory.CreateLocalMediaStream("ARDAMS");
        // Video track is only created when the server supplied video constraints.
        if (appRtcClient.videoConstraints() != null)
        {
            VideoCapturer capturer = VideoCapturer;
            videoSource = factory.CreateVideoSource(capturer, appRtcClient.videoConstraints());
            VideoTrack videoTrack = factory.CreateVideoTrack("ARDAMSv0", videoSource);
            // Render the local preview into the LOCAL endpoint of the view.
            videoTrack.AddRenderer(new VideoRenderer(new VideoCallbacks(this, vsv, VideoStreamsView.Endpoint.LOCAL)));
            lMS.AddTrack(videoTrack);
        }
        lMS.AddTrack(factory.CreateAudioTrack("ARDAMSa0"));
        pc.AddStream(lMS, new MediaConstraints());
    }
    logAndToast("Waiting for ICE candidates...");
}
/// <summary>
/// Gets every capture capability (resolution/frame rate) supported by the
/// named video device.
/// </summary>
/// <param name="name">Device name of the video source.</param>
/// <returns>All capabilities reported for the device.</returns>
public static VideoDeviceCapabilities[] getCapabilities(string name)
{
    var deviceIndex = GetVideoIndexByName(name);
    return PeerConnectionFactory.GetDeviceCapabilities(deviceIndex);
}
/// <summary>
/// Gets the names of all available video capture devices.
/// </summary>
/// <returns>Device names in enumeration order.</returns>
public static string[] getVideoDevices()
{
    var deviceInfos = PeerConnectionFactory.GetDeviceInfo();
    return deviceInfos
        .Select(info => info.DeviceName)
        .ToArray();
}
/// <summary>
/// Creates the WebRTC infrastructure adapter with its own
/// peer-connection factory.
/// </summary>
public WebRtcInfraAdapter() => _peerConnectionFactory = new PeerConnectionFactory();