AVCaptureDeviceInput pickCamera()
{
    // Find a camera at the desired position and wrap it in a device input
    // the session can accept; reports failures on the main queue.
    var wantedPosition = AVCaptureDevicePosition.Back;
    var reportedError = false;

    foreach (var candidate in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video))
    {
        if (candidate.Position != wantedPosition)
        {
            continue;
        }

        NSError error = null;
        var input = AVCaptureDeviceInput.FromDevice(candidate, out error);
        if (error != null)
        {
            reportedError = true;
            displayErrorOnMainQueue(error, "Could not initialize for AVMediaTypeVideo");
        }
        else if (session.CanAddInput(input))
        {
            return input;
        }
    }

    // Only announce "no camera" when no more specific error was already shown.
    if (!reportedError)
    {
        displayErrorOnMainQueue(null, "No camera found for requested orientation");
    }
    return null;
}
void Initialize()
{
    // Build the capture pipeline: session -> camera input -> preview layer -> still-image output.
    CaptureSession = new AVCaptureSession();
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };

    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = (cameraOptions == CameraOptions.Front)
        ? AVCaptureDevicePosition.Front
        : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device == null)
    {
        return;
    }

    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);
    // FIX: the original ignored the NSError; an invalid input must not be added to the session.
    if (error != null)
    {
        return;
    }
    CaptureSession.AddInput(input);
    Layer.AddSublayer(previewLayer);

    // FIX: the JPEG codec dictionary was built but never applied to the output
    // (dead code in the original); wire it up so stills are actually encoded as JPEG.
    outputSession = new AVCaptureStillImageOutput();
    var dict = new NSMutableDictionary();
    dict[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
    outputSession.OutputSettings = dict;
    CaptureSession.AddOutput(outputSession);

    // FIX: start the session only after inputs AND outputs are configured
    // (the original started it before the output was attached).
    CaptureSession.StartRunning();
}
void Initialize()
{
    // Session + portrait preview layer wired to the camera at the configured position.
    CaptureSession = new AVCaptureSession();

    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
        Orientation = AVCaptureVideoOrientation.Portrait
    };

    var wantedPosition = (CameraOption == CameraOptions.Front)
        ? AVCaptureDevicePosition.Front
        : AVCaptureDevicePosition.Back;
    var device = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video)
        .FirstOrDefault(d => d.Position == wantedPosition);
    if (device == null)
    {
        return; // no camera at the requested position
    }

    ConfigureCameraForDevice(device);
    captureDeviceInput = AVCaptureDeviceInput.FromDevice(device);
    CaptureSession.AddInput(captureDeviceInput);
    Layer.AddSublayer(previewLayer);

    // Still images are encoded as JPEG.
    output = new AVCaptureStillImageOutput
    {
        OutputSettings = new NSDictionary(AVVideo.CodecKey, AVVideo.CodecJPEG)
    };
    CaptureSession.AddOutput(output);
}
void Initialize()
{
    // Session + preview layer for the configured camera position; starts the preview.
    CaptureSession = new AVCaptureSession();
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };

    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = (cameraOptions == CameraOptions.Front)
        ? AVCaptureDevicePosition.Front
        : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device == null)
    {
        return;
    }

    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);
    // FIX: the original ignored the NSError; bail out instead of adding an invalid input.
    if (error != null)
    {
        return;
    }
    CaptureSession.AddInput(input);

    Layer.AddSublayer(previewLayer);
    CaptureSession.StartRunning();
    IsPreviewing = true;
}
public static CaptureSession Create(SelfView parent)
{
    // Front camera -> device input -> metadata (face) output, bundled into a CaptureSession.
    var captureDevice = AVCaptureDevice
        .DevicesWithMediaType(AVMediaType.Video)
        .FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Front);
    if (captureDevice == null)
    {
        return null;
    }

    var input = AVCaptureDeviceInput.FromDevice(captureDevice);
    if (input == null)
    {
        return null;
    }

    var output = new AVCaptureMetadataOutput();
    var cs = new CaptureSession(parent, input, output);

    // MetadataObjectTypes must be assigned only after the output has been
    // added to the session.
    output.MetadataObjectTypes = AVMetadataObjectType.Face;
    return cs;
}
private void InitSession()
{
    try
    {
        // Capture session for rear-camera metadata scanning.
        _AVSession = new AVCaptureSession();

        // Bail out unless the user has granted camera access.
        var authorizationStatus = AVCaptureDevice.GetAuthorizationStatus(AVMediaType.Video);
        if (authorizationStatus != AVAuthorizationStatus.Authorized)
        {
            return;
        }

        // Find the back camera.
        var cameras = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
        var camera = cameras.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Back);
        if (camera == null)
        {
            return;
        }

        // Attach the camera input.
        _AVDeviceImput = new AVCaptureDeviceInput(camera, out NSError _);
        if (_AVSession.CanAddInput(_AVDeviceImput))
        {
            _AVSession.AddInput(_AVDeviceImput);
        }
        else
        {
            return;
        }

        // Attach the metadata output; callbacks are delivered on a dedicated queue.
        _MetadataObjectsQueue = new DispatchQueue("metadata objects queue");
        _AVMetadataOutput = new AVCaptureMetadataOutput();
        if (_AVSession.CanAddOutput(_AVMetadataOutput))
        {
            _AVSession.AddOutput(_AVMetadataOutput);
        }
        else
        {
            return;
        }
        _AVMetadataOutput.SetDelegate(this, _MetadataObjectsQueue);

        // Video preview layer over the current view.
        _AVVideoPeviewLayer = new AVCaptureVideoPreviewLayer(_AVSession);
        _AVVideoPeviewLayer.VideoGravity = AVLayerVideoGravity.ResizeAspectFill;
        _AVVideoPeviewLayer.Frame = Bounds;
        this.Layer.AddSublayer(_AVVideoPeviewLayer);

        // Start the capture session.
        StartSession(true);
    }
    catch (Exception ex)
    {
        // FIX: Console.WriteLine(string, ex) treats the first argument as a
        // format string with no placeholders, so the exception was never printed.
        Console.WriteLine("IOS_SCAN | init error {0}", ex);
    }
}
private void OnRunningChanged(NSObservedChange change)
{
    // KVO callback for the session's running state; all UI updates go to the main queue.
    var isSessionRunning = ((NSNumber)change.NewValue).BoolValue;

    DispatchQueue.MainQueue.DispatchAsync(() =>
    {
        // Camera switching only makes sense with more than one camera present.
        this.CameraButton.Enabled = isSessionRunning
            && AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video).Length > 1;
        this.MetadataObjectTypesButton.Enabled = isSessionRunning;
        this.SessionPresetsButton.Enabled = isSessionRunning;
        this.ZoomSlider.Enabled = isSessionRunning;
        this.ZoomSlider.MaxValue = (float)NMath.Min(this.videoDeviceInput.Device.ActiveFormat.VideoMaxZoomFactor, 8);
        this.ZoomSlider.Value = (float)this.videoDeviceInput.Device.VideoZoomFactor;

        if (!isSessionRunning)
        {
            // After the session stops, remove any metadata object overlays so
            // stale ones are not shown if the view appears again.
            this.RemoveMetadataObjectOverlayLayers();
        }
        else
        {
            // A newly-applied session preset can change the preview's aspect
            // ratio, so keep the region of interest within the visible preview.
            this.PreviewView.SetRegionOfInterestWithProposedRegionOfInterest(this.PreviewView.RegionOfInterest);
        }
    });
}
/// <summary>
/// Initialization: builds the preview layer, attaches the camera input,
/// starts the session and configures the photo output.
/// </summary>
private void Initialize()
{
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
    };

    AVCaptureDevice[] videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    AVCaptureDevicePosition cameraPosition = (cameraOptions == CameraOptions.Front)
        ? AVCaptureDevicePosition.Front
        : AVCaptureDevicePosition.Back;
    device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);

    // Error out if no capture device could be obtained.
    if (device == null)
    {
        Console.WriteLine("CameraStartup Error");
        return;
    }

    var input = new AVCaptureDeviceInput(device, out NSError error);
    // FIX: the original never inspected the NSError; do not add an invalid input.
    if (error != null)
    {
        Console.WriteLine("CameraStartup Error");
        return;
    }
    CaptureSession.AddInput(input);

    // Display the camera preview.
    Layer.AddSublayer(previewLayer);

    // Brief pause after configuring the preview (kept from the original;
    // presumably works around a startup glitch — TODO confirm it is still needed).
    Thread.Sleep(300);

    // Start streaming.
    CaptureSession.StartRunning();
    IsPreviewing = true;

    // Still-photo capture output.
    PhotoOutput = new AVCapturePhotoOutput();
    CaptureSession.AddOutput(PhotoOutput);
}
private void _updateFlasMode(CameraFlashMode flashMode)
{
    // Applies the requested flash mode to every back camera that supports it.
    captureSession.BeginConfiguration();
    try
    {
        var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
        foreach (var device in devices)
        {
            if (device.Position != AVCaptureDevicePosition.Back)
            {
                continue;
            }

            var avFlashMode = (AVCaptureFlashMode)Enum.ToObject(typeof(AVCaptureFlashMode), flashMode);
            if (!device.IsFlashModeSupported(avFlashMode))
            {
                continue;
            }

            NSError err;
            device.LockForConfiguration(out err);
            // FIX: the original ignored the out-error; mutating FlashMode
            // without a successful lock fails at runtime.
            if (err != null)
            {
                continue;
            }

            device.FlashMode = avFlashMode;
            device.UnlockForConfiguration();
        }
    }
    finally
    {
        // FIX: the original could return from inside the loop without ever calling
        // CommitConfiguration, leaving the session stuck mid-configuration.
        captureSession.CommitConfiguration();
    }
}
void Initialize()
{
    // Session + preview layer using the first available video device.
    CaptureSession = new AVCaptureSession();
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };

    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var device = videoDevices.FirstOrDefault();
    if (device == null)
    {
        return;
    }

    var input = new AVCaptureDeviceInput(device, out NSError error);
    // FIX: the original ignored the NSError; bail out instead of adding an invalid input.
    if (error != null)
    {
        return;
    }
    CaptureSession.AddInput(input);

    WantsLayer = true;
    Layer.AddSublayer(previewLayer);
    CaptureSession.StartRunning();
    IsPreviewing = true;
}
//async Task RegisterFaces()
//{
//    try
//    {
//        var persongroupId = Guid.NewGuid().ToString();
//        await FaceServiceHelper.CreatePersonGroupAsync(persongroupId,
//            "Xamarin",
//            AppDelegate.WorkspaceKey);
//        await FaceServiceHelper.CreatePersonAsync(persongroupId, "Albert Einstein");
//        var personsInGroup = await FaceServiceHelper.GetPersonsAsync(persongroupId);
//        await FaceServiceHelper.AddPersonFaceAsync(persongroupId, personsInGroup[0].PersonId,
//            "https://upload.wikimedia.org/wikipedia/commons/d/d3/Albert_Einstein_Head.jpg", null, null);
//        await FaceServiceHelper.TrainPersonGroupAsync(persongroupId);
//        isFaceRegistered = true;
//    }
//    catch (FaceAPIException ex)
//    {
//        Console.WriteLine(ex.Message);
//        isFaceRegistered = false;
//    }
//}

// Picks the front camera when available, otherwise the system default video
// device, and starts the capture pipeline at medium quality.
private void PrepareCamera()
{
    captureSession.SessionPreset = AVCaptureSession.PresetMedium;

    captureDevice = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video)
            .FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Front)
        ?? AVCaptureDevice.GetDefaultDevice(AVMediaType.Video);

    BeginSession();
}
// Returns the first audio capture device whose localized name mentions
// "microphone", or null when none is present.
static AVCaptureDevice GetMicrophone()
{
    foreach (AVCaptureDevice candidate in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Audio)) {
        if (candidate.LocalizedName.ToLower().Contains("microphone"))
            return candidate;
    }
    return null;
}
// Finds a video capture device by its localized name (case-insensitive),
// or returns null when no device matches.
static AVCaptureDevice GetCamera (string localizedDeviceName)
{
    foreach (AVCaptureDevice candidate in AVCaptureDevice.DevicesWithMediaType (AVMediaType.Video)) {
        if (string.Compare (candidate.LocalizedName, localizedDeviceName, true) == 0)
            return candidate;
    }
    return null;
}
/// <summary>
/// Returns the first available audio capture device.
/// </summary>
/// <returns>
/// The audio device, or null when none is present.
/// </returns>
AVCaptureDevice AudioDevice()
{
    var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Audio);
    return devices.Length > 0 ? devices[0] : null;
}
// Returns the first video capture device at the given position, or null.
private AVCaptureDevice CameraWithPosition(AVCaptureDevicePosition pos)
{
    var all = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    for (int i = 0; i < all.Length; i++) {
        if (all[i].Position == pos)
            return all[i];
    }
    return null;
}
/// <summary>
/// Returns the video device available for the passed-in position.
/// </summary>
/// <returns>
/// The matching device, or null when none is found.
/// </returns>
/// <param name='position'>
/// The desired position.
/// </param>
static AVCaptureDevice VideoDeviceWithPosition(AVCaptureDevicePosition position)
{
    foreach (var candidate in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video)) {
        if (candidate.Position == position)
            return candidate;
    }
    return null;
}
// Returns the first camera whose position matches the requested one, or null.
public AVCaptureDevice GetCameraForOrientation(AVCaptureDevicePosition orientation)
{
    var available = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    for (int i = 0; i < available.Length; i++) {
        if (available[i].Position == orientation)
            return available[i];
    }
    return null;
}
// TODO - need better method of device detection than localized string
// Returns the first audio device whose localized name mentions "microphone", or null.
private static AVCaptureDevice getMicrophone()
{
    foreach (AVCaptureDevice candidate in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Audio)) {
        if (candidate.LocalizedName.ToLower().Contains("microphone"))
            return candidate;
    }
    return null;
}
// TODO - need better method of device detection than localized string
// Finds a video device by localized name (case-insensitive), or returns null.
private static AVCaptureDevice getCamera(string localizedDeviceName)
{
    foreach (AVCaptureDevice candidate in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video)) {
        if (string.Compare(candidate.LocalizedName, localizedDeviceName, true) == 0)
            return candidate;
    }
    return null;
}
void Initialize()
{
    // Configures a square scanning preview backed by the rear camera and a
    // BGRA video-data output feeding the detection delegate.
    Configuration.IsScanning = true;
    SizeChange();

    CaptureSession = new AVCaptureSession();
    CaptureSession.BeginConfiguration();

    this.Frame = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Width);
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = new CGRect(0f, 0f, UIScreen.MainScreen.Bounds.Width, UIScreen.MainScreen.Bounds.Width),
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill,
    };
    setPreviewOrientation();

    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = AVCaptureDevicePosition.Back;
    //var cameraPosition = (cameraOptions == CameraOptions.Front) ? AVCaptureDevicePosition.Front : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device == null)
    {
        // FIX: the original returned here with the session still inside
        // BeginConfiguration; commit before bailing out.
        CaptureSession.CommitConfiguration();
        return;
    }

    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);
    // FIX: the original never checked the NSError from the input constructor.
    if (error != null)
    {
        CaptureSession.CommitConfiguration();
        return;
    }
    CaptureSession.AddInput(input);
    CaptureSession.SessionPreset = AVFoundation.AVCaptureSession.Preset640x480;
    Layer.AddSublayer(previewLayer);

    // FIX: the output used to be added after CommitConfiguration/StartRunning;
    // configure it inside the same configuration block instead.
    VideoDataOutput = new AVCaptureVideoDataOutput();
    VideoDataOutput.AlwaysDiscardsLateVideoFrames = true;
    VideoDataOutput.WeakVideoSettings = new CVPixelBufferAttributes { PixelFormatType = CVPixelFormatType.CV32BGRA }.Dictionary;

    captureVideoDelegate = new CaptureVideoDelegate();
    captureVideoDelegate.OnDetected += (list) =>
    {
        // Forward detections and stop scanning after the first hit.
        this.OnDetected?.Invoke(list);
        CaptureSession.StopRunning();
    };
    VideoDataOutput.SetSampleBufferDelegateQueue(captureVideoDelegate, CoreFoundation.DispatchQueue.MainQueue);
    CaptureSession.AddOutput(VideoDataOutput);

    CaptureSession.CommitConfiguration();
    CaptureSession.StartRunning();
}
// Returns the rear-facing camera, or null when the device has none.
public AVCaptureDevice GetBackCamera()
{
    var all = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    for (int i = 0; i < all.Length; i++) {
        if (all[i].Position == AVCaptureDevicePosition.Back)
            return all[i];
    }
    return null;
}
// Returns the capture device at the requested position for the given media
// type, falling back to the first device of that type, or null when no
// device of that type exists at all.
static AVCaptureDevice CreateDevice(string mediaType, AVCaptureDevicePosition position)
{
    AVCaptureDevice[] devices = AVCaptureDevice.DevicesWithMediaType(mediaType);
    // FIX: the original indexed devices[0] unconditionally and threw
    // IndexOutOfRangeException when no device of this media type exists.
    if (devices.Length == 0)
    {
        return null;
    }

    AVCaptureDevice captureDevice = devices[0];
    foreach (var device in devices)
    {
        if (device.Position == position)
        {
            captureDevice = device;
            break;
        }
    }
    return captureDevice;
}
// Swaps between the front and back camera by replacing the session's first input.
public Task SwapCameraAsync()
{
    if (session != null)
    {
        var currentCameraInput = session.Inputs[0];

        // Target the opposite position of the current input.
        var targetPosition = currentCameraInput.GetPosition() == AVCaptureDevicePosition.Back
            ? AVCaptureDevicePosition.Front
            : AVCaptureDevicePosition.Back;
        AVCaptureDevice newCamera = AVCaptureDevice
            .DevicesWithMediaType(AVMediaType.Video)
            .FirstOrDefault(d => d.Position == targetPosition);

        if (newCamera != null)
        {
            session.BeginConfiguration();
            session.RemoveInput(currentCameraInput);

            NSError error = null;
            var newInput = new AVCaptureDeviceInput(newCamera, out error);
            if (error == null)
            {
                session.AddInput(newInput);
                CameraPanel = currentCameraInput.GetPosition() == AVCaptureDevicePosition.Back
                    ? CameraPanel.Front
                    : CameraPanel.Back;
            }
            else
            {
                // FIX: the original "rollback" called RemoveInput on the input it
                // had already removed, leaving the session with no camera at all;
                // restore the previous input instead.
                session.AddInput(currentCameraInput);
            }
            session.CommitConfiguration();
        }
    }
    return Task.FromResult<object>(null);
}
// Enumerates media capture devices: cameras via WebRTC's camera capturer,
// microphones via AVFoundation. Each device is mapped to a MediaDeviceInfo
// keyed by its unique ID, with the hardware model ID as the group ID.
public Task<MediaDeviceInfo[]> EnumerateDevices()
{
    // Cameras known to the WebRTC video capturer.
    var cameraCaptureDevices = Webrtc.RTCCameraVideoCapturer.CaptureDevices
        .Select(device => new MediaDeviceInfo
        {
            DeviceId = device.UniqueID,
            GroupId = device.ModelID,
            Kind = MediaDeviceInfoKind.VideoInput,
            Label = device.LocalizedName
        });

    // Audio inputs reported by AVFoundation.
    var audioCaptureDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Audio)
        .Select(device => new MediaDeviceInfo
        {
            DeviceId = device.UniqueID,
            GroupId = device.ModelID,
            Kind = MediaDeviceInfoKind.AudioInput,
            Label = device.LocalizedName
        });

#if TESTING
    //// TESTING TO GET LIST OF AUDIO OUTPUT DEVICES
    /// Apple don't want developers to change the output route/volume programmically.
    /// https://stackoverflow.com/questions/29999393/avaudiosession-output-selection
    var x = Webrtc.RTCAudioSession.SharedInstance();
    var xouts = x.OutputDataSources;
    var xins = x.InputDataSources;
    var y = AVAudioSession.SharedInstance();
    y.SetCategory(AVAudioSessionCategory.PlayAndRecord /*, AVAudioSessionCategoryOptions.DefaultToSpeaker*/);
    y.SetActive(true);
    var cr = y.CurrentRoute;
    var outs2 = cr.Outputs;
    var ins2 = cr.Inputs;
    var outs = y.OutputDataSources;
    var ins = y.InputDataSources;
    var xxxx = outs;
    y.SetActive(false);
#endif

    // Cameras first, then audio inputs, materialized into a single array.
    return(Task.FromResult(cameraCaptureDevices.Concat(audioCaptureDevices).ToArray()));
}
/// <summary>
/// Flip the camera between front and back.
/// </summary>
protected void Flip()
{
    if (!CameraAvailable)
    {
        return;
    }

    Session?.StopRunning();
    try
    {
        Session?.BeginConfiguration();
        if (Session != null)
        {
            // Drop every existing input before attaching the opposite camera.
            foreach (var input in Session.Inputs)
            {
                Session.RemoveInput(input);
            }

            var position = VideoInput.Device.Position == AVCaptureDevicePosition.Front
                ? AVCaptureDevicePosition.Back
                : AVCaptureDevicePosition.Front;

            foreach (var device in AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video))
            {
                if (device.Position == position)
                {
                    NSError error;
                    var candidate = new AVCaptureDeviceInput(device, out error);
                    // FIX: the original ignored the NSError and added the input
                    // unconditionally; skip devices whose input failed to initialize.
                    if (error == null)
                    {
                        VideoInput = candidate;
                        Session.AddInput(VideoInput);
                    }
                }
            }
        }
        Session?.CommitConfiguration();
    }
    catch (System.Exception ex)
    {
        // FIX: the original swallowed all exceptions silently; keep the
        // best-effort behavior but record what went wrong.
        System.Console.WriteLine("Camera flip failed: {0}", ex);
    }
    Session?.StartRunning();
}
void Initialize()
{
    // Session, preview layer and still-image output for the configured camera.
    CaptureSession = new AVCaptureSession();
    previewLayer = new AVCaptureVideoPreviewLayer(CaptureSession)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.Resize,
    };

    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = (cameraOptions == CameraDeviceOptions.Front)
        ? AVCaptureDevicePosition.Front
        : AVCaptureDevicePosition.Back;
    CaptureDevice = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (CaptureDevice == null)
    {
        return;
    }

    ConfigureCameraForDevice(CaptureDevice);

    NSError error;
    var input = new AVCaptureDeviceInput(CaptureDevice, out error);
    // FIX: the original never inspected the NSError; adding an invalid input
    // to the session fails at runtime.
    if (error != null)
    {
        return;
    }

    StillImageOutput = new AVCaptureStillImageOutput()
    {
        OutputSettings = new NSDictionary()
    };
    CaptureSession.AddOutput(StillImageOutput);
    CaptureSession.AddInput(input);

    Layer.AddSublayer(previewLayer);
    CaptureSession.StartRunning();
    PositionCameraPreview(null);
    IsPreviewing = true;
}
public void SetupLiveCameraStream()
{
    try
    {
        // Choose the front camera.
        var devices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
        AVCaptureDevice frontCamera = devices.FirstOrDefault(device => device.Position == AVCaptureDevicePosition.Front);
        // FIX: the original passed a possibly-null device straight to
        // ConfigureCameraForDevice and relied on the catch block to mop up.
        if (frontCamera == null)
        {
            DebugHelper.DisplayError(new Exception("No front camera available"));
            return;
        }
        ConfigureCameraForDevice(frontCamera);

        captureSession = new AVCaptureSession();
        captureDeviceInput = AVCaptureDeviceInput.FromDevice(frontCamera);
        captureSession.AddInput(captureDeviceInput);

        // Configure the output so each frame is delivered as BGRA to the
        // recorder on a private queue. (The original also built an unused
        // default-device lookup and a JPEG codec dictionary — both dead code,
        // removed.)
        var settings = new CVPixelBufferAttributes
        {
            PixelFormatType = CVPixelFormatType.CV32BGRA
        };
        // FIX: the original wrapped the output in a `using` block, disposing it
        // immediately after handing it to the still-running session.
        var output = new AVCaptureVideoDataOutput { WeakVideoSettings = settings.Dictionary };
        queue = new DispatchQueue("myQueue");
        outputRecorder = new OutputRecorder();
        output.SetSampleBufferDelegateQueue(outputRecorder, queue);
        captureSession.AddOutput(output);

        captureSession.StartRunning();
        DebugHelper.DisplayAnnouncement("CameraStream activated");
    }
    catch (Exception e)
    {
        DebugHelper.DisplayError(e);
    }
}
public AVCamCameraView()
{
    #region UIVIew adds
    // Wire a preview layer to a session fed by the camera at the configured position.
    session = new AVCaptureSession();
    VideoPreviewLayer = new AVCaptureVideoPreviewLayer(session)
    {
        Frame = Bounds,
        VideoGravity = AVLayerVideoGravity.ResizeAspectFill
    };

    var videoDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var cameraPosition = (cameraOptions == CameraOptions.Front)
        ? AVCaptureDevicePosition.Front
        : AVCaptureDevicePosition.Back;
    var device = videoDevices.FirstOrDefault(d => d.Position == cameraPosition);
    if (device == null)
    {
        return;
    }

    NSError error;
    var input = new AVCaptureDeviceInput(device, out error);
    // FIX: the original ignored the NSError from the input constructor and
    // added the input unconditionally.
    if (error == null)
    {
        session.AddInput(input);
        Layer.AddSublayer(VideoPreviewLayer);
        //session.StartRunning();
        IsPreviewing = true;
    }
    #endregion

    // Discovery session for the wide-angle and dual cameras at any position.
    videoDeviceDiscoverySession = AVCaptureDeviceDiscoverySession.Create(
        new AVCaptureDeviceType[]
        {
            AVCaptureDeviceType.BuiltInWideAngleCamera,
            AVCaptureDeviceType.BuiltInDualCamera
        },
        AVMediaType.Video,
        AVCaptureDevicePosition.Unspecified
    );

    // All communication with the session and other session objects happens
    // on this serial queue.
    sessionQueue = new DispatchQueue("session queue", false);
    setupResult = AVCamSetupResult.Success;
}
private async void SetupCameraStream()
{
    // Implementation based on https://blog.xamarin.com/how-to-display-camera-ios-avfoundation/
    captureSession = new AVCaptureSession();

    videoPreviewLayer = new AVCaptureVideoPreviewLayer(captureSession)
    {
        Frame = this.View.Frame
    };
    CameraStreamingView.Layer.AddSublayer(videoPreviewLayer);

    AVCaptureDevice[] captureDevices = AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video);
    var frontDevice = captureDevices.FirstOrDefault(d => d.Position == AVCaptureDevicePosition.Front);
    if (frontDevice != null)
    {
        ConfigureCameraForDevice(frontDevice);
        captureDeviceInput = AVCaptureDeviceInput.FromDevice(frontDevice);
        captureSession.AddInput(captureDeviceInput);

        // FIX: the original built this JPEG codec dictionary but never used it,
        // leaving OutputSettings as an empty NSDictionary; apply it so stills
        // are actually encoded as JPEG. (An unused `viewLayer` local was also removed.)
        var dictionary = new NSMutableDictionary();
        dictionary[AVVideo.CodecKey] = new NSNumber((int)AVVideoCodec.JPEG);
        stillImageOutput = new AVCaptureStillImageOutput()
        {
            OutputSettings = dictionary
        };
        captureSession.AddOutput(stillImageOutput);
        captureSession.StartRunning();
    }
    else
    {
        await UserDialogs.Instance.AlertAsync("Sorry, no front camera could be detected :-(");
    }

    cameraExists = frontDevice != null;
    TakePhotoButton.Enabled = cameraExists;
}
void RunningChanged(NSObservedChange obj)
{
    // KVO callback for the session's running state; hop to the main queue
    // before touching any UI controls.
    bool running = ((NSNumber)obj.NewValue).BoolValue;

    DispatchQueue.MainQueue.DispatchAsync(() =>
    {
        MetadataObjectTypesButton.Enabled = running;
        SessionPresetsButton.Enabled = running;
        // Switching cameras requires more than one video device.
        CameraButton.Enabled = running && AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video).Length > 1;
        ZoomSlider.Enabled = running;
        ZoomSlider.MaxValue = (float)NMath.Min(videoDeviceInput.Device.ActiveFormat.VideoMaxZoomFactor, 8);
        ZoomSlider.Value = (float)videoDeviceInput.Device.VideoZoomFactor;

        // After the session stops, remove any metadata object overlays so the
        // previously displayed ones are gone if the view appears again.
        if (!running)
        {
            RemoveMetadataObjectOverlayLayers();
        }
    });
}