public override void OnOpened(CameraDevice cameraDevice)
{
    // The camera finished opening: release the open/close guard, remember
    // the device handle, and start rendering the preview.
    owner.mCameraOpenCloseLock.Release();
    owner.mCameraDevice = cameraDevice;
    owner.CreateCameraPreviewSession();
}
// Logs the outcome of a camera start attempt for the given device.
void CameraStart(CameraDevice device, bool status)
{
    if (status)
    {
        Debug.Log("start: " + device);
    }
    else
    {
        Debug.LogError("fail to start: " + device);
    }
}
/// <summary>
/// Gets the device state.
/// </summary>
/// <param name="device">The device to get the state.</param>
/// <returns>Returns the state of the camera device.</returns>
/// <since_tizen> 4 </since_tizen>
/// <feature> http://tizen.org/feature/camera </feature>
/// <exception cref="ArgumentException">In case of invalid parameters.</exception>
/// <exception cref="InvalidOperationException">In case of any invalid operations.</exception>
/// <exception cref="NotSupportedException">In case of this feature is not supported.</exception>
public static CameraDeviceState GetDeviceState(CameraDevice device)
{
    // Reject values outside the CameraDevice enum before hitting the native layer.
    ValidationUtil.ValidateEnum(typeof(CameraDevice), device, nameof(device));

    Native.GetDeviceState(device, out var state)
        .ThrowIfFailed("Failed to get the camera device state.");

    return state;
}
// Tear-down: clear tracked images, stop the camera, then wind the tracker
// down (reset tracking option, stop, destroy) in that order.
void OnDestroy()
{
    imageTrackablesMap.Clear();

    CameraDevice.GetInstance().Stop();

    var trackerManager = TrackerManager.GetInstance();
    trackerManager.SetTrackingOption(TrackerManager.TrackingOption.NORMAL_TRACKING);
    trackerManager.StopTracker();
    trackerManager.DestroyTracker();
}
public override void OnDisconnected(CameraDevice cameraDevice)
{
    // A disconnect is unexpected here, so log at error level.
    Log.Error(TAG, "OnDisconnected");

    // Unblock anyone waiting on the open/close guard, then release the device.
    owner.mCameraOpenCloseLock.Release();
    cameraDevice.Close();
    owner.mCameraDevice = null;
}
// Called when the camera device is no longer available.
public override void OnDisconnected(CameraDevice camera)
{
    // Always close the disconnected device. The original only closed it when
    // a renderer was attached, leaking the camera handle otherwise; Android's
    // StateCallback contract says the device should be closed on disconnect.
    camera.Close();

    if (renderer != null)
    {
        renderer.CameraDevice = null;
    }
}
public override void OnOpened(CameraDevice camera)
{
    // Without a renderer there is nothing to preview on.
    if (renderer == null)
    {
        return;
    }

    renderer.CameraDevice = camera;
    renderer.StartPreview();
}
/// <summary>
/// Changes the camera device.
/// </summary>
/// <param name="device">The hardware camera to access.</param>
/// <since_tizen> 3 </since_tizen>
/// <feature> http://tizen.org/feature/camera </feature>
/// <remarks>
/// If display reuse is set using <see cref="DisplayReuseHint"/>
/// before stopping the preview, the display will be reused and last frame on the display
/// can be kept even though camera device is changed.
/// The camera must be in the <see cref="CameraState.Created"/>.
/// </remarks>
/// <exception cref="ArgumentException">In case of invalid parameters.</exception>
/// <exception cref="InvalidOperationException">In case of any invalid operations.</exception>
/// <exception cref="NotSupportedException">In case of the ChangeDevice feature is not supported.</exception>
/// <exception cref="ObjectDisposedException">The camera already has been disposed of.</exception>
public void ChangeDevice(CameraDevice device)
{
    // Device switching is only legal in the Created state.
    ValidateState(CameraState.Created);
    ValidationUtil.ValidateEnum(typeof(CameraDevice), device, nameof(device));

    var error = Native.ChangeDevice(_handle, device);
    CameraErrorFactory.ThrowIfError(error, "Failed to change the camera device");
}
// Fetches one frame from the server's per-camera video feed endpoint.
public async Task<Bitmap> GetCameraFeedAsync(CameraDevice cam)
{
    // The enum value doubles as the camera's numeric id in the URL.
    var feedUri = new Uri($"{URL}/car/video_feed/{(int)cam}");
    return await LoadImage(feedUri);
}
// Unity lifecycle hook: cache scene references and prepare the capture buffer.
void Start()
{
    manager = GameObject.Find("GameManagerObject").GetComponent<ManagerScript>();
    ARCam = CameraDevice.Instance;
    ui = GameObject.Find("Canvas");

    // Screen-sized texture used as the snapshot target.
    pic = new Texture2D(Screen.width, Screen.height);
    savePath = Application.temporaryCachePath;
}
/// <summary>
/// Releases the camera device, if one is currently held.
/// </summary>
public void Disconnect()
{
    if (_cameraDevice == null)
    {
        return;
    }

    _cameraDevice.Close();
    _cameraDevice = null;
}
public override void OnOpened(CameraDevice cameraDevice)
{
    // Hand the opened device over, then bring up the preview surface.
    _openCameraDevice(cameraDevice);
    _createCameraPreview();
}
public override void OnError(CameraDevice camera, [GeneratedEnum] CameraError error)
{
    // Unblock any pending open/close, then release the failed device.
    mCameraHelper.ReleaseCameraOpenCloseLock();
    camera.Close();

    Log.Debug(TAG, "CameraDevice onError!");

    // Clear the helper's camera device reference.
    mCameraHelper.SetCameraDevice(null);
}
public override void OnDisconnected(CameraDevice camera)
{
    // Unblock any pending open/close, then release the disconnected device.
    mCameraHelper.ReleaseCameraOpenCloseLock();
    camera.Close();

    Log.Debug(TAG, "CameraDevice onDisconnected!");

    // Clear the helper's camera device reference.
    mCameraHelper.SetCameraDevice(null);
}
// Captures the collaborators a capture-session callback needs: the device,
// the output surface, the shared state lock, the worker handler and a log tag.
public CameraCaptureSessionCallback(CameraDevice cameraDevice, Surface surface, object cameraStateLock, Handler backgroundHandler, String tag)
{
    _cameraDevice = cameraDevice;
    _surface = surface;
    _cameraStateLock = cameraStateLock;
    _backgroundHandler = backgroundHandler;
    _tag = tag;
}
public override void OnOpened(CameraDevice camera)
{
    // Store the device and start previewing; opening is now complete.
    fragment.camera_device = camera;
    fragment.startPreview();
    fragment.opening_camera = false;

    // Re-fit the preview transform once a texture view exists.
    if (fragment.texture_view != null)
    {
        fragment.configureTransform(fragment.texture_view.Width, fragment.texture_view.Height);
    }
}
public override void OnError(CameraDevice camera, CameraError error)
{
    // Unblock waiters, release the failed device and drop our reference.
    fragment.cameraOpenCloseLock.Release();
    camera.Close();
    fragment.cameraDevice = null;

    // A camera error is unrecoverable here: shut the hosting activity down.
    if (fragment.Activity != null)
    {
        fragment.Activity.Finish();
    }
}
// Releases the camera device if one is open; safe to call repeatedly.
private void CloseCameraDevice()
{
    if (cameraDevice == null)
    {
        return;
    }

    cameraDevice.Close();
    cameraDevice = null;
}
public override void OnOpened(CameraDevice camera)
{
    // Only record the opened device while the activity is still alive.
    if (Activity == null)
    {
        return;
    }

    Activity.cameraDevice = camera;
    Activity.openingCamera = false;
}
public override void OnOpened(CameraDevice camera)
{
    // Remember the device, start previewing, then let waiters proceed.
    fragment.cameraDevice = camera;
    fragment.startPreview();
    fragment.cameraOpenCloseLock.Release();

    // Re-fit the preview transform once a texture view exists.
    if (fragment.textureView != null)
    {
        fragment.configureTransform(fragment.textureView.Width, fragment.textureView.Height);
    }
}
// Scene entry point: initialize alerts, wire the camera output plane,
// remember where "back" leads, and start the device camera.
public void Start()
{
    AlertsAPI.instance.Init();

    CameraDevice.photo2DPlane = cameraField;
    backScene = "Description";

    CameraDevice.StartCameraDevice();
}
/// <summary>
/// Initializes a new instance of the <see cref="Camera"/> class.
/// </summary>
/// <param name="device">The camera device to access.</param>
/// <exception cref="ArgumentException">Invalid CameraDevice type.</exception>
/// <exception cref="InvalidOperationException">In case of any invalid operations.</exception>
/// <exception cref="NotSupportedException">The camera feature is not supported.</exception>
/// <since_tizen> 3 </since_tizen>
/// <feature> http://tizen.org/feature/camera </feature>
public Camera(CameraDevice device)
{
    // Validate the enum value before creating the native handle.
    ValidationUtil.ValidateEnum(typeof(CameraDevice), device, nameof(device));

    CreateCameraDevice(device);
    Initialize();
}
/// <summary>
/// Gets the flash state.
/// </summary>
/// <param name="device">The device to get the state.</param>
/// <returns>Returns the flash state of the camera device.</returns>
/// <since_tizen> 3 </since_tizen>
/// <feature> http://tizen.org/feature/camera </feature>
/// <exception cref="ArgumentException">In case of invalid parameters.</exception>
/// <exception cref="InvalidOperationException">In case of any invalid operations.</exception>
/// <exception cref="NotSupportedException">In case of this feature is not supported.</exception>
public static CameraFlashState GetFlashState(CameraDevice device)
{
    // Reject values outside the CameraDevice enum before hitting the native layer.
    ValidationUtil.ValidateEnum(typeof(CameraDevice), device, nameof(device));

    Native.GetFlashState(device, out var state)
        .ThrowIfFailed("Failed to get camera flash state");

    return state;
}
public override void OnError(CameraDevice camera, CameraError error)
{
    // The device is unusable after an error: release it unconditionally.
    camera.Close();

    // Clear activity-side state only if the activity is still around.
    if (Activity != null)
    {
        Activity.cameraDevice = null;
        Activity.openingCamera = false;
    }
}
// Called when the camera device is no longer available.
public override void OnDisconnected(CameraDevice camera)
{
    // Close the device unconditionally: Android's StateCallback contract says
    // a disconnected camera should be closed. The original only closed it
    // when Activity was non-null, leaking the handle otherwise.
    camera.Close();

    if (Activity != null)
    {
        Activity.cameraDevice = null;
        Activity.openingCamera = false;
    }
}
public override void OnError(CameraDevice camera, CameraErrorType error)
{
    // Release the failed device and drop our reference to it.
    camera.Close();
    fragment.camera_device = null;

    // A camera error is unrecoverable here: finish the hosting activity.
    if (fragment.Activity != null)
    {
        fragment.Activity.Finish();
    }

    fragment.opening_camera = false;
}
// Rotates the camera image and spins every registered object 90 degrees
// around the Z axis to match.
public void RotateImage()
{
    CameraDevice.RotateImage();

    foreach (var rotateObject in rotateObjects)
    {
        rotateObject.transform.Rotate(0, 0, 90);
    }
}
// Stops the camera if it was previously started, logging the result code.
public void StopCamera()
{
    if (!cameraStartDone)
    {
        return;
    }

    ResultCode result = CameraDevice.GetInstance().Stop();
    Debug.Log("Unity StopCamera. result : " + result);
    cameraStartDone = false;
}
// Called when the camera device hits a fatal error.
public override void OnError(CameraDevice cameraDevice, CameraError error)
{
    // The device is unusable after an error: release it first.
    cameraDevice.Close();

    // Guard BEFORE touching Owner. The original dereferenced
    // Owner.mCameraDevice and only then tested Owner for null, so the
    // check was dead code and a null Owner would have thrown NRE.
    if (Owner == null)
    {
        return;
    }

    Owner.mCameraDevice = null;
}
protected override void Start()
{
    // Fail fast with a user-visible popup when no camera is available.
    if (!CameraDevice.isAvailable())
    {
        throw new UIPopupException(typeof(CameraDevice) + " not available");
    }

    base.Start();
}
// Stops the camera if it was previously started.
void StopCamera()
{
    if (!cameraStartDone)
    {
        return;
    }

    Debug.Log("Unity StopCamera");
    CameraDevice.GetInstance().Stop();
    cameraStartDone = false;
}
public override void OnOpened(CameraDevice camera)
{
    // Nothing to do if the fragment has already gone away.
    if (Fragment == null)
    {
        return;
    }

    Fragment.mCameraDevice = camera;
    Fragment.StartPreview();
    Fragment.mOpeningCamera = false;
}
public override void OnPause()
{
    base.OnPause();

    // Release the camera while backgrounded so other apps can use it.
    if (mCameraDevice == null)
    {
        return;
    }

    mCameraDevice.Close();
    mCameraDevice = null;
}
// Called when the camera device is no longer available.
public override void OnDisconnected(CameraDevice camera)
{
    // Close the device unconditionally: Android's StateCallback contract says
    // a disconnected camera should be closed. The original only closed it
    // when Fragment was non-null, leaking the handle otherwise.
    camera.Close();

    if (Fragment != null)
    {
        Fragment.mCameraDevice = null;
        Fragment.mOpeningCamera = false;
    }
}
public override void OnPause()
{
    base.OnPause();

    // Release the camera while backgrounded so other apps can use it.
    if (camera_device == null)
    {
        return;
    }

    camera_device.Close();
    camera_device = null;
}
// Called when the camera device is no longer available.
public override void OnDisconnected(CameraDevice camera)
{
    // Close the device unconditionally: Android's StateCallback contract says
    // a disconnected camera should be closed. The original only closed it
    // when Fragment was non-null, leaking the handle otherwise.
    camera.Close();

    if (this.Fragment != null)
    {
        this.Fragment.mCameraOpenCloseLock.Release();
        this.Fragment.cameraDevice = null;
    }
}
public override void OnOpened(CameraDevice camera)
{
    // Nothing to do if the fragment has already gone away.
    if (Fragment == null)
    {
        return;
    }

    Fragment.mCameraDevice = camera;
    Fragment.StartPreview();
    Fragment.mOpeningCamera = false;
}
// Called when the camera device hits a fatal error.
public override void OnError(CameraDevice cameraDevice, CameraError error)
{
    // The device is unusable after an error: release it first; it is valid
    // regardless of owner state.
    cameraDevice.Close();

    // Guard BEFORE dereferencing owner. The original released the lock and
    // cleared owner.mCameraDevice, then tested owner for null — the check
    // was dead code and a null owner would already have thrown NRE.
    if (owner == null)
    {
        return;
    }

    owner.mCameraOpenCloseLock.Release();
    owner.mCameraDevice = null;

    // A camera error is unrecoverable here: finish the hosting activity.
    Activity activity = owner.Activity;
    if (activity != null)
    {
        activity.Finish();
    }
}
/// <summary>
/// Calculates the screen space parameters for an oriented bounding box (center, half extents, rotation) specified in camera frame coordinates.
/// The calculation is based on the current screen orientation.
/// </summary>
public static OrientedBoundingBox CameraFrameToScreenSpaceCoordinates(OrientedBoundingBox cameraFrameObb, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
{
    // Extra rotation (degrees) applied to the box to compensate for the
    // current screen orientation; portrait orientations also swap which
    // video-mode dimension maps to which screen axis.
    bool isPortrait = false;
    float obbRotation = 0.0f;
    switch (QCARRuntimeUtilities.ScreenOrientation)
    {
        case ScreenOrientation.Portrait:
            obbRotation += 90.0f;
            isPortrait = true;
            break;
        case ScreenOrientation.LandscapeRight:
            obbRotation += 180.0f;
            break;
        case ScreenOrientation.PortraitUpsideDown:
            obbRotation += 270.0f;
            isPortrait = true;
            break;
        // LandscapeLeft: no extra rotation, landscape axes (default).
    }

    // Scale factors from camera-frame pixels to viewport pixels; width and
    // height swap roles in portrait.
    var scaleX = bgTextureViewPortRect.width / (isPortrait ? videoModeData.height : videoModeData.width);
    var scaleY = bgTextureViewPortRect.height / (isPortrait ? videoModeData.width : videoModeData.height);

    // Center goes through the point-conversion overload; half extents only
    // need scaling (rotation handles their orientation).
    var center = CameraFrameToScreenSpaceCoordinates(cameraFrameObb.Center, bgTextureViewPortRect, isTextureMirrored, videoModeData);
    var halfExtents = new Vector2(cameraFrameObb.HalfExtents.x * scaleX, cameraFrameObb.HalfExtents.y * scaleY);

    // Mirrored textures flip the rotation direction; then convert
    // radians -> degrees and add the orientation offset.
    var rotation = cameraFrameObb.Rotation;
    if (isTextureMirrored)
        rotation = -rotation;
    rotation = rotation*180.0f/Mathf.PI + obbRotation;

    return new OrientedBoundingBox(center, halfExtents, rotation);
}
/// <summary>
/// Calculates a position in screen space coordinates based on the current orientation and background config for a given screen-space position
/// </summary>
public static Vector2 CameraFrameToScreenSpaceCoordinates(Vector2 cameraFrameCoordinate, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
{
    // Viewport rectangle of the background texture on screen.
    float viewportOrigX = bgTextureViewPortRect.xMin;
    float viewportOrigY = bgTextureViewPortRect.yMin;
    float viewportSizeX = bgTextureViewPortRect.width;
    float viewportSizeY = bgTextureViewPortRect.height;

    bool isPortrait = false;

    // Camera frame dimensions in pixels.
    float textureSizeX = videoModeData.width;
    float textureSizeY = videoModeData.height;

    // Conversion parameters (offset prefix and axis inversion multipliers);
    // all are filled in by PrepareCoordinateConversion based on orientation
    // and mirroring.
    float prefixX = 0.0f;
    float prefixY = 0.0f;
    float inversionMultiplierX = 0.0f;
    float inversionMultiplierY = 0.0f;
    PrepareCoordinateConversion(isTextureMirrored, ref prefixX, ref prefixY, ref inversionMultiplierX, ref inversionMultiplierY, ref isPortrait);

    // normalize the coordinates within viewport between 0 and 1
    float normalizedCoordX = (cameraFrameCoordinate.x/textureSizeX - prefixX)/inversionMultiplierX;
    float normalizedCoordY = (cameraFrameCoordinate.y/textureSizeY - prefixY)/inversionMultiplierY;

    Vector2 result;

    // convert from screen coordinates to texture coordinates
    // (portrait swaps the normalized axes before mapping into the viewport)
    if (isPortrait)
    {
        result = new Vector2(viewportSizeX * normalizedCoordY + viewportOrigX, viewportSizeY * normalizedCoordX + viewportOrigY);
    }
    else
    {
        result = new Vector2(viewportSizeX * normalizedCoordX + viewportOrigX, viewportSizeY * normalizedCoordY + viewportOrigY);
    }

    return result;
}
// Switches the active camera (front/back), pausing the object tracker
// around the camera re-initialization when it was running.
private bool ChangeCameraDirection(CameraDevice.CameraDirection direction)
{
    bool needsObjectTrackerRestart = stopRunningObjectTracker();

    var camera = CameraDevice.Instance;
    camera.Stop();
    camera.Deinit();

    // Init reports whether the requested direction is supported.
    bool directionSupported = camera.Init(direction);

    camera.Start();

    if (needsObjectTrackerRestart)
        restartRunningObjectTracker();

    return directionSupported;
}
// Derives the camera's externally reachable host:port from its snapshot URL
// and stores it on the device.
// NOTE(review): the 'externalIp' local and the 'mjpegUrl' parameter are
// never used here — the stored value comes from the snapshot URI alone.
// Confirm whether the getExternalIp() call is still needed.
private void setupMetaPropertiesForOtherCam(CameraDevice cameraDevice, string snapShotUrl, string mjpegUrl)
{
    try
    {
        var externalIp = getExternalIp(); // Get the external ip of this camera
        var snapShotUri = new Uri(snapShotUrl);
        cameraDevice.externalIp = snapShotUri.Host + ":" + snapShotUri.Port;
    }
    catch { } // best-effort: a malformed snapshot URL simply leaves externalIp unset
}
public override void OnPause()
{
    base.OnPause();

    // Release the camera while backgrounded so other apps can use it.
    if (camera_device == null)
    {
        return;
    }

    camera_device.Close();
    camera_device = null;
}
public override void OnPause()
{
    base.OnPause();

    // Release the camera while backgrounded so other apps can use it.
    if (mCameraDevice == null)
    {
        return;
    }

    mCameraDevice.Close();
    mCameraDevice = null;
}
// Switches the active camera (front/back). Cloud reco is toggled off/on
// around the switch; disabling it stops and restarts the target finder
// internally. The object tracker is likewise paused if it was running.
private bool ChangeCameraDirection(CameraDevice.CameraDirection direction)
{
    CloudRecoBehaviour cloudRecoBehaviour = GameObject.FindObjectOfType(typeof(CloudRecoBehaviour)) as CloudRecoBehaviour;

    // Guard: FindObjectOfType can return null; the original then threw
    // NullReferenceException instead of just switching the camera.
    if (cloudRecoBehaviour != null)
        cloudRecoBehaviour.CloudRecoEnabled = false;

    bool directionSupported = false;
    bool needsObjectTrackerRestart = stopRunningObjectTracker();

    CameraDevice.Instance.Stop();
    CameraDevice.Instance.Deinit();
    if (CameraDevice.Instance.Init(direction))
    {
        directionSupported = true;
    }
    CameraDevice.Instance.Start();

    if (needsObjectTrackerRestart)
        restartRunningObjectTracker();

    if (cloudRecoBehaviour != null)
        cloudRecoBehaviour.CloudRecoEnabled = true;

    return directionSupported;
}
// Called when the camera device is no longer available.
public override void OnDisconnected(CameraDevice camera)
{
    // Close the device unconditionally: Android's StateCallback contract says
    // a disconnected camera should be closed. The original only closed it
    // when Fragment was non-null, leaking the handle otherwise.
    camera.Close();

    if (Fragment != null)
    {
        Fragment.mCameraDevice = null;
        Fragment.mOpeningCamera = false;
    }
}
/// <summary>
/// Calculates a position in camera frame coordinates based on the current orientation and background config for a given screen-space position
/// </summary>
public static QCARRenderer.Vec2I ScreenSpaceToCameraFrameCoordinates(Vector2 screenSpaceCoordinate, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
{
    // Viewport rectangle of the background texture on screen.
    float viewportOrigX = bgTextureViewPortRect.xMin;
    float viewportOrigY = bgTextureViewPortRect.yMin;
    float viewportSizeX = bgTextureViewPortRect.width;
    float viewportSizeY = bgTextureViewPortRect.height;

    bool isPortrait = false;

    // Camera frame dimensions in pixels.
    float textureSizeX = videoModeData.width;
    float textureSizeY = videoModeData.height;

    // Conversion parameters (offset prefix and axis inversion multipliers);
    // all are filled in by PrepareCoordinateConversion based on orientation
    // and mirroring.
    float prefixX = 0.0f;
    float prefixY = 0.0f;
    float inversionMultiplierX = 0.0f;
    float inversionMultiplierY = 0.0f;
    PrepareCoordinateConversion(isTextureMirrored, ref prefixX, ref prefixY, ref inversionMultiplierX, ref inversionMultiplierY, ref isPortrait);

    // normalize the coordinates within viewport between 0 and 1
    float normalizedCoordX = (screenSpaceCoordinate.x - viewportOrigX) / viewportSizeX;
    float normalizedCoordY = (screenSpaceCoordinate.y - viewportOrigY) / viewportSizeY;

    QCARRenderer.Vec2I result;

    // convert from screen coordinates to texture coordinates
    // (portrait swaps the normalized axes; results are rounded to integer
    // camera-frame pixel coordinates)
    if (isPortrait)
    {
        result = new QCARRenderer.Vec2I(Mathf.RoundToInt((prefixX + (inversionMultiplierX * normalizedCoordY)) * textureSizeX), Mathf.RoundToInt((prefixY + (inversionMultiplierY * normalizedCoordX)) * textureSizeY));
    }
    else
    {
        result = new QCARRenderer.Vec2I(Mathf.RoundToInt((prefixX + (inversionMultiplierX * normalizedCoordX)) * textureSizeX), Mathf.RoundToInt((prefixY + (inversionMultiplierY * normalizedCoordY)) * textureSizeY));
    }

    return result;
}
// Switches the active camera (front/back) by re-initializing the device.
// Returns whether the requested direction is supported.
private bool ChangeCameraDirection(CameraDevice.CameraDirection direction)
{
    var camera = CameraDevice.Instance;

    camera.Stop();
    camera.Deinit();

    // Init reports whether the requested direction is supported.
    bool directionSupported = camera.Init(direction);

    camera.Start();

    return directionSupported;
}
/// <summary>
/// Expands the {password} and {username} placeholders in a URL template
/// with the given device's credentials.
/// </summary>
/// <param name="stringToReplace">Template string containing the placeholders.</param>
/// <param name="device">Device supplying the credential values.</param>
/// <returns>The template with both placeholders substituted.</returns>
public static string replaceStringValues(string stringToReplace, CameraDevice device)
{
    var result = stringToReplace.Replace("{password}", device.password);
    result = result.Replace("{username}", device.userName);
    return result;
}
public override void OnError(CameraDevice camera, CameraErrorType error)
{
    // The device is unusable after an error: release it unconditionally.
    camera.Close();

    if (Fragment == null)
    {
        return;
    }

    Fragment.mCameraDevice = null;
    Fragment.mOpeningCamera = false;

    // A camera error is unrecoverable here: finish the hosting activity.
    Activity activity = Fragment.Activity;
    if (activity != null)
    {
        activity.Finish();
    }
}
/// <summary>
/// Called when the server pulls a saved device from the db.
/// Builds a CameraDevice from the stored record, resolves its stream and
/// snapshot URLs (generic-camera metadata when present, otherwise the
/// Foscam "ip|user|password" uniqueName convention), kicks off metadata
/// setup on a background thread, and registers the device locally.
/// </summary>
/// <param name="dbDevice">Stored device record to materialize.</param>
/// <returns>The tracked camera device.</returns>
public HaDevice trackDevice(HaDeviceDto dbDevice)
{
    var haDev = new CameraDevice() { deviceId = dbDevice.deviceId, providerDeviceId = dbDevice.uniqueName, deviceName = dbDevice.deviceName };

    try
    {
        // See if there is meta data available for snapshot/mjpg url's
        var snapShotUrl = getDeviceMetadata(haDev.deviceId, META_SNAPSHOT_URL);
        var mjpegUrl = getDeviceMetadata(haDev.deviceId, META_MJPEG_URL);

        if (!string.IsNullOrEmpty(snapShotUrl))
        {
            // Non-Foscam camera: URLs come straight from metadata.
            haDev.liveStreamUrl = mjpegUrl;
            haDev.snapShotUrl = snapShotUrl;

            // Resolve the remaining properties off-thread so tracking
            // does not block on network work.
            Thread t = new Thread(() => { setupMetaPropertiesForOtherCam(haDev, snapShotUrl, mjpegUrl); });
            t.IsBackground = true;
            t.Start();
        }
        else
        {
            // Foscam only
            // Get the ip, username and password from the devicename
            try
            {
                // uniqueName is encoded as "http://ip|username|password".
                var split = dbDevice.uniqueName.Split('|');
                haDev.ip = split[0].Replace("http://", "");
                haDev.userName = split[1];
                haDev.password = split[2];
            }
            catch { } // best-effort: malformed uniqueName leaves credentials unset

            haDev.liveStreamUrl = string.Format("http://{0}{1}", haDev.ip, replaceStringValues(LIVESTREAM_URL, haDev));

            Thread t = new Thread(() => { setupMetaProperties(haDev); });
            t.IsBackground = true;
            t.Start();
        }
    }
    catch { } // best-effort: the device is still tracked even if URL setup fails

    localDevices.Add(haDev);
    return haDev;
}
/// <summary>
/// Downloads one snapshot frame from the camera, stamps the current time
/// onto the image, and stores it on the device as a base64 JPEG.
/// A per-device flag prevents overlapping snapshot runs.
/// </summary>
private void getSnapshot(CameraDevice device)
{
    if (device != null && !isCurrentlySnapshotting(device))
    {
        // Mark this device as busy before doing any network work.
        lock (dictionaryLockObject)
        {
            currentSnapshots[device.name] = true;
        }

        // Metadata snapshot URL when present (non-Foscam), otherwise the
        // Foscam URL built from ip + credential template.
        var url = "";
        if (!string.IsNullOrEmpty(device.snapShotUrl))
        {
            // non foscam
            url = device.snapShotUrl;
        }
        else
        {
            url = string.Format("http://{0}{1}", device.ip, replaceStringValues(SNAPSHOT_URL, device));
        }

        try
        {
            var tempFile = Path.GetTempFileName();
            using (var c = new WebClient())
            {
                c.DownloadFile(url, tempFile);
            }

            // Write the timestamp on the image
            // NOTE(review): 'message' is never used — DrawString below calls
            // DateTime.Now.ToString() again directly.
            var message = DateTime.Now.ToString();
            using (var image = (Bitmap)Image.FromFile(tempFile))
            {
                using (var graphics = Graphics.FromImage(image))
                {
                    using (var arialFont = new Font("Arial", 12))
                    {
                        PointF firstLocation = new PointF(10f, 10f);
                        graphics.DrawString(DateTime.Now.ToString(), arialFont, Brushes.White, firstLocation);
                    }
                    graphics.Flush();
                }

                // Re-encode as JPEG and store base64 on the device record.
                using (MemoryStream ms = new MemoryStream())
                {
                    image.Save(ms, ImageFormat.Jpeg);
                    device.b64Image = Convert.ToBase64String(ms.ToArray());
                }
            }
            File.Delete(tempFile);
        }
        catch (Exception ex)
        {
            var msg = string.Format("Foscam: Error getting snapshot from URL: {0}. {1}", url, ex.Message);
            writeLog("Foscam: Error getting snapshot from URL: " + url, ex);
            raiseOnDeviceError(device.deviceId, msg);
            try
            {
                //device.b64Image = ex.Message;
            }
            catch { }
        }
        finally
        {
            // Tell it there's no longer a snapshot happening
            lock (dictionaryLockObject)
            {
                currentSnapshots[device.name] = false;
            }
        }
    }
    else
    {
        writeLog("Foscam: Existing snapshot detected. Skipping snapshot for device " + device.name);
    }
}
// Maps the cross-platform CameraDevice enum onto UIKit's picker enum.
private static UIImagePickerControllerCameraDevice GetUICameraDevice(CameraDevice device)
{
    if (device == CameraDevice.Front)
    {
        return UIImagePickerControllerCameraDevice.Front;
    }

    if (device == CameraDevice.Rear)
    {
        return UIImagePickerControllerCameraDevice.Rear;
    }

    // Any newly-added enum member must be mapped explicitly.
    throw new NotSupportedException();
}
/// <summary>
/// Returns whether a snapshot is currently in progress for the given
/// device, registering an initial 'not snapshotting' entry the first time
/// a device is seen.
/// </summary>
/// <param name="device">Device to query.</param>
/// <returns>True when a snapshot for this device is already running.</returns>
private bool isCurrentlySnapshotting(CameraDevice device)
{
    lock (dictionaryLockObject)
    {
        // Single lookup via TryGetValue instead of ContainsKey + indexer.
        if (currentSnapshots.TryGetValue(device.name, out bool isSnapping))
        {
            return isSnapping;
        }

        // create the key
        currentSnapshots.Add(device.name, false);
        return false;
    }
}
// Default options: use the rear-facing camera.
public CameraOptions()
{
    this.Camera = CameraDevice.Rear;
}
/// <summary>
/// Populates a Foscam-style camera's external address, external live-stream
/// URL, and all pan/tilt and preset control URLs from per-device metadata,
/// expanding the {username}/{password} placeholders in each template.
/// </summary>
private void setupMetaProperties(CameraDevice cameraDevice)
{
    try
    {
        var externalIp = getExternalIp();

        // Replace the internal ip with the external
        if (cameraDevice.ip.Contains(":"))
        {
            // Preserve the port carried by the internal address.
            var splitStr = cameraDevice.ip.Split(':');
            cameraDevice.externalIp = externalIp + ":" + splitStr[splitStr.Length - 1];
        }
        else
        {
            cameraDevice.externalIp = externalIp;
        }

        cameraDevice.liveStreamExternalUrl = string.Format("http://{0}{1}", cameraDevice.externalIp, replaceStringValues(LIVESTREAM_URL, cameraDevice));

        // Pan/tilt control URLs (start and stop for each direction).
        cameraDevice.panDownUrl = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_DOWN_URL), cameraDevice);
        cameraDevice.panDownStopUrl = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_DOWN_STOP_URL), cameraDevice);
        cameraDevice.panUpUrl = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_UP_URL), cameraDevice);
        cameraDevice.panUpStopUrl = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_UP_STOP_URL), cameraDevice);
        cameraDevice.panLeftUrl = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_LEFT_URL), cameraDevice);
        cameraDevice.panLeftStopUrl = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_LEFT_STOP_URL), cameraDevice);
        cameraDevice.panRightUrl = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_RIGHT_URL), cameraDevice);
        cameraDevice.panRightStopUrl = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_RIGHT_STOP_URL), cameraDevice);

        // Preset set/goto URLs for presets 1 through 5.
        cameraDevice.setPreset1Url = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_SETPRESET1_URL), cameraDevice);
        cameraDevice.gotoPreset1Url = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_GETPRESET1_URL), cameraDevice);
        cameraDevice.setPreset2Url = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_SETPRESET2_URL), cameraDevice);
        cameraDevice.gotoPreset2Url = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_GETPRESET2_URL), cameraDevice);
        cameraDevice.setPreset3Url = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_SETPRESET3_URL), cameraDevice);
        cameraDevice.gotoPreset3Url = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_GETPRESET3_URL), cameraDevice);
        cameraDevice.setPreset4Url = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_SETPRESET4_URL), cameraDevice);
        cameraDevice.gotoPreset4Url = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_GETPRESET4_URL), cameraDevice);
        cameraDevice.setPreset5Url = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_SETPRESET5_URL), cameraDevice);
        cameraDevice.gotoPreset5Url = replaceStringValues(base.getDeviceMetadata(cameraDevice.deviceId, META_GETPRESET5_URL), cameraDevice);
    }
    catch (Exception ex)
    {
        writeLog("Problem setting up device meta data", ex);
    }
}
// Helper function to automatically create an option list of an enum object.
// Renders a full-screen toggle list of all FocusMode values with the current
// mode highlighted, and returns the mode the user selected (or the current
// one if nothing was clicked this frame).
private static CameraDevice.FocusMode EnumOptionList(
    CameraDevice.FocusMode setMode)
{
    Type modeType = setMode.GetType();

    // Get possible enum values.
    CameraDevice.FocusMode[] modes =
        (CameraDevice.FocusMode[])Enum.GetValues(modeType);

    // Setup style for list.
    GUIStyle optionListStyle = new GUIStyle(GUI.skin.button);
    optionListStyle.stretchHeight = true;
    optionListStyle.stretchWidth = true;

    // Setup style for toggles.
    // We use "button" style as template because default toggles are too
    // small.
    GUIStyle toggleStyle = new GUIStyle(GUI.skin.button);
    toggleStyle.stretchHeight = true;
    toggleStyle.stretchWidth = true;
    toggleStyle.normal.textColor = Color.gray;
    toggleStyle.onNormal.textColor = Color.gray;
    toggleStyle.focused.textColor = Color.gray;
    toggleStyle.onFocused.textColor = Color.gray;
    toggleStyle.active.textColor = Color.gray;
    toggleStyle.onActive.textColor = Color.gray;
    toggleStyle.hover.textColor = Color.gray;
    toggleStyle.onHover.textColor = Color.gray;

    // Setup style for active toggle.
    // Setting active values for the toggle Style does not work so we create
    // another style.
    GUIStyle activeToggleStyle = new GUIStyle(toggleStyle);
    activeToggleStyle.normal.textColor = Color.white;
    activeToggleStyle.onNormal.textColor = Color.white;
    activeToggleStyle.focused.textColor = Color.white;
    activeToggleStyle.onFocused.textColor = Color.white;
    activeToggleStyle.active.textColor = Color.white;
    activeToggleStyle.onActive.textColor = Color.white;
    activeToggleStyle.hover.textColor = Color.white;
    activeToggleStyle.onHover.textColor = Color.white;

    CameraDevice.FocusMode newMode = setMode;

    // We render the menu over the full screen.
    GUILayout.BeginArea(new Rect(0, 0, Screen.width, Screen.height));
    GUILayout.BeginVertical();
    foreach (CameraDevice.FocusMode mode in modes)
    {
        if (mode == setMode)
        {
            // Current mode: shown highlighted; clicking it changes nothing.
            GUILayout.Toggle(true, mode.ToString(), activeToggleStyle);
        }
        else
        {
            // Other modes: a click selects this mode as the new value.
            if (GUILayout.Toggle(false, mode.ToString(), toggleStyle))
            {
                newMode = mode;
            }
        }
    }
    GUILayout.EndVertical();
    GUILayout.EndArea();

    return newMode;
}
// Converts a camera-frame rectangle into an on-screen Rect by mapping its
// two defining corners to landscape-left screen space.
private Rect ScreenSpaceRectFromCamSpaceRectData(RectangleIntData camSpaceRectData, Rect bgTextureViewPortRect, bool isTextureMirrored, CameraDevice.VideoModeData videoModeData)
{
    var topLeftCam = new Vector2(camSpaceRectData.leftTopX, camSpaceRectData.leftTopY);
    var bottomRightCam = new Vector2(camSpaceRectData.rightBottomX, camSpaceRectData.rightBottomY);

    Vector2 topLeftSSLandscape = QCARRuntimeUtilities.CameraFrameToScreenSpaceCoordinates(topLeftCam, bgTextureViewPortRect, isTextureMirrored, videoModeData);
    Vector2 bottomRightSSLandscape = QCARRuntimeUtilities.CameraFrameToScreenSpaceCoordinates(bottomRightCam, bgTextureViewPortRect, isTextureMirrored, videoModeData);

    return QCARRuntimeUtilities.CalculateRectFromLandscapeLeftCorners(topLeftSSLandscape, bottomRightSSLandscape, isTextureMirrored);
}
// Closes the camera device and releases the media recorder under the
// open/close semaphore.
private void CloseCamera()
{
    try
    {
        // Serialize against concurrent open/close attempts.
        cameraOpenCloseLock.Acquire();

        if (null != cameraDevice)
        {
            cameraDevice.Close();
            cameraDevice = null;
        }

        if (null != mediaRecorder)
        {
            mediaRecorder.Release();
            mediaRecorder = null;
        }
    }
    catch (InterruptedException e)
    {
        // Chain the original interrupt as the cause instead of discarding it,
        // so the resulting stack trace shows where the wait was interrupted.
        throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
    }
    finally
    {
        cameraOpenCloseLock.Release();
    }
}
public override void OnPause()
{
    base.OnPause();

    // Release the camera and drop related references while backgrounded.
    if (mCameraDevice == null)
    {
        return;
    }

    mCameraDevice.Close();
    mCameraDevice = null;
    mCameraManager = null;
    cCharacteristics = null;
}