// Builds the render targets, lookup tables and initial state for the time-lens cameras.
private void Awake()
{
    _presentCamera.targetTexture = new RenderTexture(Screen.width, Screen.height, 24);
    _pastCamera.targetTexture = new RenderTexture(Screen.width, Screen.height, 24);
    _futureCamera.targetTexture = new RenderTexture(Screen.width, Screen.height, 24);

    // One camera and one "lens enabled" flag per timeline.
    cameras = new Dictionary<CameraType, Camera>
    {
        { CameraType.Present, _presentCamera },
        { CameraType.Past, _pastCamera },
        { CameraType.Future, _futureCamera }
    };
    activeLenses = new Dictionary<CameraType, bool>
    {
        { CameraType.Present, true },
        { CameraType.Past, true },
        { CameraType.Future, true }
    };

    // Start on the present timeline and bind its render target to the shader.
    currentCameraType = CameraType.Present;
    Shader.SetGlobalTexture("_TimeView", _presentCamera.targetTexture);

    int startingMask = LayerMask.GetMask(currentCameraType.ToString());
    _displayCamera.cullingMask = startingMask;
    _OutlineCamera.cullingMask = startingMask;

    Active = false;
    _cameraModel.SetActive(false);

    recAudioSource = GetComponent<AudioSource>();
    recAudioSource.loop = true;
}
// Moves the display/outline cameras and this object's layers over to the
// currently selected timeline, then deactivates the lens. No-op when the lens
// is inactive or the target timeline is already shown.
public void ChangeDimension()
{
    if (!Active)
    {
        return;
    }

    int targetMask = LayerMask.GetMask(currentCameraType.ToString());
    if (_displayCamera.cullingMask == targetMask)
    {
        return;
    }

    ChangingDimension?.Invoke();
    StartCoroutine(FlashCamera());

    _displayCamera.cullingMask = targetMask;
    _OutlineCamera.cullingMask = targetMask;

    // Re-layer this object, its children and every registered extra object.
    int targetLayer = LayerMask.NameToLayer(currentCameraType.ToString());
    gameObject.layer = targetLayer;
    LayerUtils.SetChildLayerRecursivly(transform, targetLayer);
    foreach (GameObject switched in _objectsToSwitchDimensions)
    {
        switched.layer = targetLayer;
        LayerUtils.SetChildLayerRecursivly(switched.transform, targetLayer);
    }

    SetActive(false, false);
}
// How far above the character's head the camera is aimed.
public float cameraFocus = 2f;

private void Start()
{
    playerInput = GetComponent<PlayerInput>();

    // Track the player and remember the initial camera offset from it.
    target = GameObject.FindGameObjectWithTag("Player").transform;
    cameraToPlayerVector = transform.position - target.position;
    rotationVector = cameraToPlayerVector;

    // Keep the camera type around as a string — easier to work with later.
    cameraoption = cameraType.ToString();
}
// Syncs this checkbox with the camera type saved in PlayerPrefs, if any.
// Fix: the if/else pair assigning a bool is collapsed into one assignment, and
// the ToUpper()-based comparison is replaced with a culture-invariant
// case-insensitive compare (ToUpper() misbehaves under locales like tr-TR).
private void Start()
{
    if (!PlayerPrefs.HasKey("cameraType"))
    {
        return;
    }

    GetComponent<UICheckbox>().isChecked = string.Equals(
        camera.ToString(),
        PlayerPrefs.GetString("cameraType"),
        System.StringComparison.OrdinalIgnoreCase);
}
//UPDATE ATTACHED CAMERA
/// <summary>
/// Ensures the spawned camera prefab matches the configured camera type.
/// NOTE: the name check is deliberately case sensitive for performance —
/// keep this in mind if you add new cameras.
/// </summary>
/// <returns>True when a prefab (re)load was triggered, false when the current prefab already matches.</returns>
bool UpdateAttachedCamera()
{
    string expectedName = cameraType.ToString() + "Camera";
    bool prefabMatches = cameraPrefab && cameraPrefab.name.Contains(expectedName);

    if (prefabMatches)
    {
        return false;
    }

    //TODO: if not usescenecamera, destroy the old spawned camera first.
    LoadCameraPrefab(); //LOAD CAMERA PREFAB
    return true;
}
// Handles global key presses; F7 cycles through the available camera types.
protected override bool OnKeyDown(KeyEvent e)
{
    // If any window is open on top, do not process input here.
    if (IsPaused())
    {
        return base.OnKeyDown(e);
    }

    if (e.Key == EKeys.F7)
    {
        // Advance to the next camera type, wrapping back to the first one.
        CameraType next = (CameraType)((int)GetRealCameraType() + 1);
        if (next == CameraType.Count)
        {
            next = (CameraType)0;
        }

        cameraType = next;
        FreeCameraEnabled = cameraType == CameraType.Free;
        GameEngineApp.Instance.AddScreenMessage("Camera type: " + cameraType.ToString());
        return true;
    }

    return base.OnKeyDown(e);
}
// Selects a different timeline lens: updates the HUD label and its colour,
// plays the switch animation, and rebinds the global time-view texture.
public void SwapCamera(CameraType cameraType)
{
    bool blocked = !Active || currentCameraType == cameraType || !activeLenses[cameraType];
    if (blocked)
    {
        return;
    }

    currentCameraType = cameraType;
    _selectedTimelineText.text = currentCameraType.ToString();

    // Colour-code the HUD label per timeline.
    switch (currentCameraType)
    {
        case CameraType.Present:
            _selectedTimelineText.color = Color.green;
            break;
        case CameraType.Past:
            _selectedTimelineText.color = Color.yellow;
            break;
        case CameraType.Future:
            _selectedTimelineText.color = Color.red;
            break;
    }

    StartCoroutine(TurnSwitch());
    Shader.SetGlobalTexture("_TimeView", cameras[currentCameraType].targetTexture);
}
/// <summary>
/// Shows the native iOS image picker in camera mode and takes a picture with
/// the requested (front/rear) camera. The callback receives an error message
/// string (null on success) and the captured media result; it is always
/// invoked on the main thread.
/// </summary>
public void TakePicture(CameraType cameraType, Action<string, MediaResult> callback)
{
    Util.NullArgumentTest(callback);

    // Only one picker UI may be active at a time.
    if (mPickerController != null)
    {
        Debug.Log("Ignoring TakePicture call because another image picker UI is being shown.");
        return;
    }

    // Marshal the callback onto the main thread before any async work.
    callback = RuntimeHelper.ToMainThread(callback);

    if (!IsCameraAvailable(cameraType))
    {
        callback(cameraType.ToString() + " camera is not supported on this device.", null);
        return;
    }

    // Create a new image picker.
    var picker = InteropObjectFactory<UIImagePickerController>.Create(
        () => new UIImagePickerController(),
        c => c.ToPointer());

    // Source type must be camera.
    picker.SourceType = UIImagePickerController.SourceTypeEnum.Camera;

    // Set camera type (front vs rear device).
    picker.CameraDevice = cameraType == CameraType.Front ? UIImagePickerController.CameraDeviceEnum.Front : UIImagePickerController.CameraDeviceEnum.Rear;

    // Only allow still images (no video capture).
    NSMutableArray<NSString> mediaTypes = new NSMutableArray<NSString>();
    mediaTypes.AddObject(UTTypeConstants.kUTTypeImage);
    picker.MediaTypes = mediaTypes;

    // Create a delegate for the picker view controller; it closes the VC and
    // forwards the (error, result) pair to our callback.
    picker.Delegate = new InternalUIImagePickerControllerDelegate(InternalUIImagePickerControllerDelegate.PickerOperation.TakePicture)
    {
        CloseAndResetVC = () =>
        {
            if (mPickerController != null)
            {
                mPickerController.DismissViewController(true, null);
                mPickerController = null;
            }
        },
        CompleteCallback = (error, result) =>
        {
            callback(error, result);
        }
    };

    // Store the VC ref so we can detect/dismiss the active picker later.
    mPickerController = picker;

    // Now show the VC on top of Unity's GL view controller.
    using (var unityVC = UIViewController.UnityGetGLViewController())
        unityVC.PresentViewController(picker, true, null);
}
// Renders the flexible window with the three toolbars and logs the currently
// selected values each GUI pass.
void OnGUI()
{
    this.flexibleWindow.Show(() =>
    {
        this.currentEnum = this.toolbarEnum.Show();
        this.currentObject = this.toolbarObject.Show();
        this.currentInt = this.toolbarInt.Show();
    });

    Debug.Log($"{currentEnum.ToString()} / {currentObject.ToString()} / {currentInt.ToString()}");
}
// Sends a "camera <type> move <direction>" command to the codec. Zoom
// directions use the codec's "zoom+"/"zoom-" spelling; everything else is the
// enum name lower-cased.
public void Move(CameraType type, CameraDirection direction)
{
    string d = direction switch
    {
        CameraDirection.ZoomIn => "zoom+",
        CameraDirection.ZoomOut => "zoom-",
        _ => direction.ToString().ToLower(),
    };

    Codec.Send(string.Format("camera {0} move {1}", type.ToString().ToLower(), d));
}
/// <summary>
/// Builds the ListView row for this camera entry, one sub-item per column in
/// the order the list view expects.
/// Fix: the fractional display format string was duplicated fifteen times;
/// it is now a single named constant so the columns cannot drift apart.
/// Note: ToString without an explicit culture uses the current culture, as the
/// original code did — kept for backward compatibility.
/// </summary>
/// <returns>The populated <see cref="ListViewItem"/>.</returns>
public override ListViewItem GetListViewItem()
{
    // Shared display format for all fractional columns.
    const string FractionFormat = "#####0.############";

    ListViewItem m = new ListViewItem("");
    m.SubItems.Add(Position.X.ToString(FractionFormat));
    m.SubItems.Add(Position.Y.ToString(FractionFormat));
    m.SubItems.Add(Position.Z.ToString(FractionFormat));
    m.SubItems.Add(Angle.X.ToString(FractionFormat));
    m.SubItems.Add(Angle.Y.ToString(FractionFormat));
    m.SubItems.Add(Angle.Z.ToString(FractionFormat));
    m.SubItems.Add(Viewpoint1.X.ToString(FractionFormat));
    m.SubItems.Add(Viewpoint1.Y.ToString(FractionFormat));
    m.SubItems.Add(Viewpoint1.Z.ToString(FractionFormat));
    m.SubItems.Add(Viewpoint2.X.ToString(FractionFormat));
    m.SubItems.Add(Viewpoint2.Y.ToString(FractionFormat));
    m.SubItems.Add(Viewpoint2.Z.ToString(FractionFormat));
    m.SubItems.Add(FieldOfViewBegin.ToString());
    m.SubItems.Add(FieldOfViewBeginSine.ToString(FractionFormat));
    m.SubItems.Add(FieldOfViewBeginCosine.ToString(FractionFormat));
    m.SubItems.Add(FieldOfViewEnd.ToString());
    m.SubItems.Add(FieldOfViewEndSine.ToString(FractionFormat));
    m.SubItems.Add(FieldOfViewEndCosine.ToString(FractionFormat));
    m.SubItems.Add(FovSpeed.ToString());
    m.SubItems.Add(CameraType.ToString());
    m.SubItems.Add(LinkedRoute.ToString());
    m.SubItems.Add(RouteSpeed.ToString());
    m.SubItems.Add(PointSpeed.ToString());
    m.SubItems.Add(Duration.ToString());
    m.SubItems.Add(NextCamera.ToString());
    m.SubItems.Add(FirstIntroCamera.ToString());
    m.SubItems.Add(HexUtil.GetHexReverse(Unknown5));
    return m;
}
// Dispatches to the initialiser that matches _cameraType. An unrecognised
// value is reported and replaced with DEFAULT_CAMERA_TYPE before dispatching
// (same outcome as the original default-case + goto-case fallback).
private void InitialiseCamera()
{
    bool recognised = _cameraType == CameraType.FirstPerson
                   || _cameraType == CameraType.ThirdPerson
                   || _cameraType == CameraType.Freelook;

    if (!recognised)
    {
        Debug.LogError("'_cameraType' isn't assigned! Defaulted to " + DEFAULT_CAMERA_TYPE.ToString());
        _cameraType = DEFAULT_CAMERA_TYPE;
    }

    switch (_cameraType)
    {
        case CameraType.FirstPerson:
            InitialiseFirstPerson();
            break;
        case CameraType.ThirdPerson:
            InitialiseThirdPerson();
            break;
        case CameraType.Freelook:
            InitialiseFreelook();
            break;
    }
}
/// <summary>
/// Writes this camera as a JSON object. Only the projection that is actually
/// set (Orthographic or Perspective) is emitted, followed by the "type"
/// discriminator and the base-class properties, all inside one object.
/// </summary>
/// <param name="writer">Destination JSON writer; object start/end tokens are written here.</param>
public override void Serialize(JsonWriter writer)
{
    writer.WriteStartObject();

    if (Orthographic != null)
    {
        writer.WritePropertyName("orthographic");
        Orthographic.Serialize(writer);
    }

    if (Perspective != null)
    {
        writer.WritePropertyName("perspective");
        Perspective.Serialize(writer);
    }

    writer.WritePropertyName("type");
    writer.WriteValue(Type.ToString());

    base.Serialize(writer);
    writer.WriteEndObject();
}
// Chooses and constructs the camera source implementation for this build.
// Without the NATCAM define only the Unity WebCamTexture source exists, so the
// configured type is forced to UnityWebCam before dispatching.
void Awake()
{
#if !NATCAM
    cameraType = CameraType.UnityWebCam;
#endif
    switch (cameraType)
    {
        case CameraType.UnityWebCam:
            CameraSource = new UnityWebCamCameraSource();
            break;

        case CameraType.NatCam:
#if NATCAM
            CameraSource = new NatCamCameraSource(ConvertToResolution(natCamResolution));
#endif
            break;

        default:
            // Unknown camera type — fail loudly rather than run without a source.
            throw new NotImplementedException(cameraType.ToString());
    }

    Debug.Log("Initialized camera source: " + CameraSource);
}
} //END SetResourcesLoadedTexture

//-----------------------------//
/// <summary>
/// Coroutine that loads a single cube-side texture for one camera/eye from the
/// StreamingAssets folder. Prefers a pre-converted .bytes file and falls back
/// to a .jpg when no .bytes file exists for the requested image.
/// </summary>
private IEnumerator SetTextureFromStreamingAssetsBytes( CameraType cameraType, CubeSide cubeSide, ImageType imageType )
//-----------------------------//
{
    if( showDebug ) { Debug.Log( "ImageFactory.cs SetStreamingAssetsLoadedTexture() start" ); }

    WWW www;

    //First we need to get the actual image files that are stored in the StreamingAssets folder as byte files
    string texturePath = DatabaseStringHelper.CreateStreamingAssetsPath( "Textures/" + imageType.ToString() + "/" + cameraType.ToString() + "/" + cubeSide.ToString() + ".bytes", DatabaseStringHelper.StringStyle.WithEscapeUriAndSystemPathCombine );
    string existsPath = DatabaseStringHelper.CreateStreamingAssetsPathForFileExistsCheck( "Textures/" + imageType.ToString() + "/" + cameraType.ToString() + "/" + cubeSide.ToString() + ".bytes" );

    bool exists = false;

    //Check if the image exists as bytes, this check is different by platform
    if( Application.platform == RuntimePlatform.Android )
    {
        //Android can only check using www
        www = new WWW( existsPath );
        yield return www;
        exists = string.IsNullOrEmpty( www.error );
    }
    else
    {
        //All other platforms can check using System.IO
        exists = System.IO.File.Exists( existsPath );
    }

    //If the texture does not exist in bytes format, try to load it as a .jpg file
    if( !exists )
    {
        //Debug.Log( "ImageFactory.cs image does not exist as bytes, trying as jpg... existsPath = " + existsPath );
        texturePath = "Textures/" + imageType.ToString() + "/" + cameraType.ToString() + "/" + cubeSide.ToString() + ".jpg";
        texturePath = DatabaseStringHelper.CreateStreamingAssetsPath( texturePath, DatabaseStringHelper.StringStyle.WithEscapeUriAndSystemPathCombine );
    }
    else
    {
        //Debug.Log( "ImageFactory.cs image exists as bytes! ... existsPath = " + existsPath );
    }

    //Download the image bytes from the StreamingAssets folder
    www = new WWW( texturePath );

    //Continue with the main thread until this finishes
    while( !www.isDone )
    {
        yield return www;
    }

    //Setup the appropriate amount of texture memory for the bytes
    SetLoadedTextureScaleAndSettings( cameraType, cubeSide, WWWHelper.GetPlatformPreferredTextureFormat(), false, www.textureNonReadable.width, www.textureNonReadable.height );

    //The bytes are loaded and the texture memory is set, let's turn the bytes into a texture in OpenGL
    if( cameraType == CameraType.Left )
    {
        if( cubeSide == CubeSide.front ) { texture_Left_front = www.textureNonReadable; }
        else if( cubeSide == CubeSide.back ) { texture_Left_back = www.textureNonReadable; }
        else if( cubeSide == CubeSide.left ) { texture_Left_left = www.textureNonReadable; }
        else if( cubeSide == CubeSide.right ) { texture_Left_right = www.textureNonReadable; }
        else if( cubeSide == CubeSide.top ) { texture_Left_top = www.textureNonReadable; }
        else if( cubeSide == CubeSide.bottom ) { texture_Left_bottom = www.textureNonReadable; }
    }
    else if( cameraType == CameraType.Right )
    {
        if( cubeSide == CubeSide.front ) { texture_Right_front = www.textureNonReadable; }
        else if( cubeSide == CubeSide.back ) { texture_Right_back = www.textureNonReadable; }
        else if( cubeSide == CubeSide.left ) { texture_Right_left = www.textureNonReadable; }
        else if( cubeSide == CubeSide.right ) { texture_Right_right = www.textureNonReadable; }
        else if( cubeSide == CubeSide.top ) { texture_Right_top = www.textureNonReadable; }
        else if( cubeSide == CubeSide.bottom ) { texture_Right_bottom = www.textureNonReadable; }
    }

    //Clear the existing bytes from memory
    www.Dispose();

    //Now that the texture is set in OpenGL, make sure the textures are set to Clamped (Hides seams between textures)
    SetLoadedTextureWrapMode( cameraType, cubeSide, TextureWrapMode.Clamp );

    //The texture is now loaded! Set the appropriate boolean to true
    SetLoadedBool( cameraType, cubeSide, true );

} //END SetStreamingAssetsLoadedTexture
} //END SetTextureFromResourcesTexture

//-----------------------------//
/// <summary>
/// Coroutine that loads one cube-side texture for one camera/eye from the
/// Resources folder, where the image is stored as a .bytes TextAsset and its
/// pixel dimensions live in a sibling "scale" text file (e.g. "2048x2048").
/// </summary>
private IEnumerator SetTextureFromResourcesBytes( CameraType cameraType, CubeSide cubeSide, ImageType imageType )
//-----------------------------//
{
    if( showDebug ) { Debug.Log( "ImageFactory.cs SetResourcesLoadedTexture() start" ); }

    //First we need to know how big the image scale is (width, height), we store a text file for each image
    // at each image's parent folder (EX: Resources/Textures/Astronaut/scale) ... Resources calls don't use file format extension (EX: .jpg, .txt, .bytes )
    ResourceRequest request = Resources.LoadAsync<TextAsset>( "Textures/" + imageType.ToString() + "/scale" );

    //Wait until the file has been read
    while( !request.isDone )
    {
        yield return request;
    }

    //Now that the file has been read, turn it into a TextAsset, and from that we can read it as a string
    TextAsset textAsset = request.asset as TextAsset;

    //We have the scale text file, grab the x and y scale as ints
    string[] scales = textAsset.text.Split( new char[] { 'x' } );
    int scaleX = int.Parse( scales[ 0 ] );
    int scaleY = int.Parse( scales[ 1 ] );

    //When we're done, make sure we unload anything we grabbed from Resources
    Resources.UnloadAsset( textAsset );

    //Get the bytes from the Resources folder
    string bytesPath = "Textures/" + imageType.ToString() + "/" + cameraType.ToString() + "/" + cubeSide.ToString();

    //This has to be done as a TextAsset, and from there we can grab the bytes
    request = Resources.LoadAsync<TextAsset>( bytesPath );

    //Wait until the file has been read
    while( !request.isDone )
    {
        yield return request;
    }

    //Now that the file has been read, turn it into a TextAsset, and from that we can turn it into a .bytes
    textAsset = request.asset as TextAsset;

    //Create a new texture variable with the appropriate scale and settings
    SetLoadedTextureScaleAndSettings( cameraType, cubeSide, WWWHelper.GetPlatformPreferredTextureFormat(), false, scaleX, scaleY );

    //Set the texture's wrapping mode to clamped, which prevents seeing the edges of the skybox's cubemap
    SetLoadedTextureWrapMode( cameraType, cubeSide, TextureWrapMode.Clamp );

    //Set the bytes to the appropriate texture for this camera/eye and cube side
    if( cameraType == CameraType.Left )
    {
        if( cubeSide == CubeSide.front ) { texture_Left_front.LoadImage( textAsset.bytes ); }
        else if( cubeSide == CubeSide.back ) { texture_Left_back.LoadImage( textAsset.bytes ); }
        else if( cubeSide == CubeSide.left ) { texture_Left_left.LoadImage( textAsset.bytes ); }
        else if( cubeSide == CubeSide.right ) { texture_Left_right.LoadImage( textAsset.bytes ); }
        else if( cubeSide == CubeSide.top ) { texture_Left_top.LoadImage( textAsset.bytes ); }
        else if( cubeSide == CubeSide.bottom ) { texture_Left_bottom.LoadImage( textAsset.bytes ); }
    }
    else if( cameraType == CameraType.Right )
    {
        if( cubeSide == CubeSide.front ) { texture_Right_front.LoadImage( textAsset.bytes ); }
        else if( cubeSide == CubeSide.back ) { texture_Right_back.LoadImage( textAsset.bytes ); }
        else if( cubeSide == CubeSide.left ) { texture_Right_left.LoadImage( textAsset.bytes ); }
        else if( cubeSide == CubeSide.right ) { texture_Right_right.LoadImage( textAsset.bytes ); }
        else if( cubeSide == CubeSide.top ) { texture_Right_top.LoadImage( textAsset.bytes ); }
        else if( cubeSide == CubeSide.bottom ) { texture_Right_bottom.LoadImage( textAsset.bytes ); }
    }

    //When we're done, make sure we unload anything we grabbed from Resources
    Resources.UnloadAsset( textAsset );

    //This texture is loaded, set the appropriate bool
    SetLoadedBool( cameraType, cubeSide, true );

} //END SetResourcesLoadedTexture
} //END SetDictionaryLoadedTexture

//-----------------------------//
/// <summary>
/// Loads one cube-side texture for one camera/eye from the Resources folder as
/// a Texture2D. Low-end devices get the smaller "Small" variant; when the
/// requested camera's folder has no texture, the opposite camera's folder is
/// used instead (lets Mono image sets stand in for Stereo ones).
/// Fix: the original called Resources.Load for the same path up to three times
/// (existence check plus each assignment); the asset is now loaded once and
/// reused. Also dropped a redundant .ToString() on the string variable "size".
/// </summary>
private void SetTextureFromResourcesTexture( CameraType cameraType, CubeSide cubeSide, ImageType imageType )
//-----------------------------//
{
    if( showDebug ) { Debug.Log( "ImageFactory.cs SetTextureFromResourcesTexture() start" ); }

    //Set the scale of the texture to grab based on whether this device can support high end textures
    int scale = 2048;
    string size = "Large";

    if( PlatformHelper.IsLowEndDevice() )
    {
        scale = 1024;
        size = "Small";
    }

    //Get the texture from the Resources folder
    string path = "Textures/Images/" + size + "/" + imageType.ToString() + "/" + cameraType.ToString() + "/" + cubeSide.ToString();
    Texture2D loadedTexture = Resources.Load<Texture2D>( path );

    //If the texture doesn't exist, change the path to the opposite camera (allows us to use Mono images instead of Stereo)
    if( loadedTexture == null )
    {
        CameraType camera = CameraType.Left;

        if( cameraType == CameraType.Left ) { camera = CameraType.Right; }

        path = "Textures/Images/" + size + "/" + imageType.ToString() + "/" + camera.ToString() + "/" + cubeSide.ToString();
        loadedTexture = Resources.Load<Texture2D>( path );
    }

    if( showDebug ) { Debug.Log( "ImageFactory.cs SetTextureFromResourcesTexture() path = " + path ); }

    //Create a new texture variable with the appropriate scale and settings
    SetLoadedTextureScaleAndSettings( cameraType, cubeSide, WWWHelper.GetPlatformPreferredTextureFormat(), false, scale, scale );

    //Set the texture's wrapping mode to clamped, which prevents seeing the edges of the skybox's cubemap
    SetLoadedTextureWrapMode( cameraType, cubeSide, TextureWrapMode.Clamp );

    //Assign the loaded texture to the field matching this camera/eye and cube side
    if( cameraType == CameraType.Left )
    {
        if( cubeSide == CubeSide.front ) { texture_Left_front = loadedTexture; }
        else if( cubeSide == CubeSide.back ) { texture_Left_back = loadedTexture; }
        else if( cubeSide == CubeSide.left ) { texture_Left_left = loadedTexture; }
        else if( cubeSide == CubeSide.right ) { texture_Left_right = loadedTexture; }
        else if( cubeSide == CubeSide.top ) { texture_Left_top = loadedTexture; }
        else if( cubeSide == CubeSide.bottom ) { texture_Left_bottom = loadedTexture; }
    }
    else if( cameraType == CameraType.Right )
    {
        if( cubeSide == CubeSide.front ) { texture_Right_front = loadedTexture; }
        else if( cubeSide == CubeSide.back ) { texture_Right_back = loadedTexture; }
        else if( cubeSide == CubeSide.left ) { texture_Right_left = loadedTexture; }
        else if( cubeSide == CubeSide.right ) { texture_Right_right = loadedTexture; }
        else if( cubeSide == CubeSide.top ) { texture_Right_top = loadedTexture; }
        else if( cubeSide == CubeSide.bottom ) { texture_Right_bottom = loadedTexture; }
    }

    //This texture is loaded, set the appropriate bool
    SetLoadedBool( cameraType, cubeSide, true );

} //END SetTextureFromResourcesTexture
/// <summary>
/// Gets the cameras for one roadway, serving from a per-type/per-name cache
/// when it is less than a day old, otherwise scraping the DFW Transtar camera
/// listing page and re-populating the cache.
/// </summary>
/// <param name="cameraType">Type of the camera.</param>
/// <param name="name">The roadway name (spaces are converted to underscores for the query string).</param>
/// <returns>An observable sequence of <see cref="Camera"/> items; errors surface via OnError.</returns>
public IObservable<Camera> GetCameras(CameraType cameraType, string name)
{
    var cachedCameras = new CachedEntity<List<Camera>>(string.Format("Services.Camera.{0}.{1}", cameraType, name));

    return new AnonymousObservable<Camera>(observer =>
    {
        // Refresh when the cache is empty, never stamped, or older than one day.
        if (!cachedCameras.Value.Any() || !cachedCameras.CachedDateTime.HasValue || cachedCameras.CachedDateTime.Value <= DateTime.Now.AddDays(-1))
        {
            // Regional cameras use a different listing endpoint/query shape.
            Uri cameraListUri = null;
            if (cameraType == CameraType.Regional)
            {
                cameraListUri = new Uri(string.Format("{0}?roadway={1}", DfwTranstarUri.CamerasRegionalUri, name.Replace(' ', '_')));
            }
            else
            {
                cameraListUri = new Uri(string.Format("{0}?list={1}&roadway={2}", DfwTranstarUri.CamerasUri, cameraType.ToString().ToLower(), name.Replace(' ', '_')));
            }

            IObservable<string> observable = ObservableHelper.TextFromUri(cameraListUri);
            var svc = observable.Subscribe(result =>
            {
                // Scrape the "ListTable" HTML table out of the page and parse it as XML.
                int tableStart = result.IndexOf("<table id=\"ListTable\"");
                int tableEnd = result.IndexOf("</table>", tableStart);
                XElement xmlDoc = XElement.Parse(result.Substring(tableStart, (tableEnd + 8) - tableStart));

                // One camera per <li>; the ID is extracted from the anchor's href.
                var cameras = from item in xmlDoc.Element("tr").Element("td").Element("ul").Elements("li")
                              select new Camera
                              {
                                  ID = this.GetCameraID(cameraType, item.Element("a").Attribute("href").Value),
                                  Name = item.Value,
                                  EnableThumbnails = ProfileCommon.UserSettings.ShowCameraThumbnails,
                                  // Regional detail URIs also need the roadway name with any
                                  // direction suffix stripped off.
                                  ImageUri = cameraType == CameraType.Regional ?
                                      string.Format(DfwTranstarUri.CameraDetailRegionalUri,
                                          name.ToLower()
                                              .Replace(" north", "")
                                              .Replace(" south", "")
                                              .Replace(" east", "")
                                              .Replace(" west", ""),
                                          this.GetCameraID(cameraType, item.Element("a").Attribute("href").Value)) :
                                      string.Format(DfwTranstarUri.CameraDetailUri, this.GetCameraID(cameraType, item.Element("a").Attribute("href").Value))
                              };

                // Persist the fresh list, then replay it to the observer.
                cachedCameras.Value = cameras.ToList();
                cachedCameras.Cache();
                cachedCameras.Value.ToList().ForEach(c => observer.OnNext(c));
                observer.OnCompleted();
            },
            ex =>
            {
                observer.OnError(new Exception("Unable to retrieve traffic data.", ex));
            });
        }
        else
        {
            // Cache is fresh — replay the cached cameras.
            cachedCameras.Value.ToList().ForEach(c => observer.OnNext(c));
            observer.OnCompleted();
        }

        return Disposable.Empty;
    });
}
/// <summary>
/// Gets the roadways that carry cameras of the given type, serving from a
/// per-type cache when it is less than a day old, otherwise scraping the DFW
/// Transtar listing page and re-populating the cache.
/// </summary>
/// <param name="cameraType">Type of the camera.</param>
/// <returns>An observable sequence of <see cref="Roadway"/> items; errors surface via OnError.</returns>
public IObservable<Roadway> GetRoadwayCameras(CameraType cameraType)
{
    var cachedRoadways = new CachedEntity<List<Roadway>>("Services.RoadwayCameras." + cameraType.ToString());

    return new AnonymousObservable<Roadway>(observer =>
    {
        // Refresh when the cache is empty or older than one day.
        if (!cachedRoadways.Value.Any() || cachedRoadways.CachedDateTime <= DateTime.Now.AddDays(-1))
        {
            // Pick the listing page for the requested camera type.
            Uri uri = null;
            switch (cameraType)
            {
                case CameraType.Freeway:
                    uri = new Uri(DfwTranstarUri.CamerasUri);
                    break;
                case CameraType.Street:
                    uri = new Uri(DfwTranstarUri.CamerasUri + "?list=street");
                    break;
                case CameraType.Regional:
                    uri = new Uri(DfwTranstarUri.CamerasRegionalUri);
                    break;
            }

            IObservable<string> observable = ObservableHelper.TextFromUri(uri);
            var svc = observable.Subscribe(result =>
            {
                // Scrape the "ListTable" HTML table out of the page and parse it as XML.
                int tableStart = result.IndexOf("<table id=\"ListTable\"");
                int tableEnd = result.IndexOf("</table>", tableStart);
                XElement xmlDoc = XElement.Parse(result.Substring(tableStart, (tableEnd + 8) - tableStart));

                // One roadway per <li>; only freeways get a roadway icon.
                var roadways = from item in xmlDoc.Element("tr").Element("td").Element("ul").Elements("li")
                               select new Roadway
                               {
                                   Name = item.Value,
                                   IconUri = cameraType == CameraType.Freeway ? this.GetRoadwayIconUri(item.Value) : "/Resources/Images/empty_svg_small.png"
                               };

                // Persist the fresh list, then replay it to the observer.
                cachedRoadways.Value = roadways.ToList();
                cachedRoadways.Cache();
                cachedRoadways.Value.ToList().ForEach(r => observer.OnNext(r));
                observer.OnCompleted();
            },
            ex =>
            {
                observer.OnError(new Exception("Unable to retrieve traffic data.", ex));
            });
        }
        else
        {
            // Cache is fresh — replay the cached roadways.
            cachedRoadways.Value.ToList().ForEach(r => observer.OnNext(r));
            observer.OnCompleted();
        }

        return Disposable.Empty;
    });
}
// Fires the animator trigger named after the given camera type.
public void TriggerCamera(CameraType c) => animator.SetTrigger(c.ToString());
// Creates a console transmitter whose colour comes from the config key
// CONSOLE_<CAMERA>_COLOR. When the key is absent or unparseable, Camera1
// defaults to green and every other camera to red.
public Task<IEventTransmitter> CreateTransmitter(string segmentId, CameraType cameraId)
{
    string colorKey = $"CONSOLE_{cameraId.ToString().ToUpper().Replace("-", "")}_COLOR";
    string configuredColor = _configurationReader.GetConfigValue<string>(colorKey, false);

    ConsoleColor consoleColor;
    if (!Enum.TryParse(configuredColor, true, out consoleColor))
    {
        consoleColor = cameraId == CameraType.Camera1 ? ConsoleColor.Green : ConsoleColor.Red;
    }

    var transmitter = new ConsoleTransmitter { ConsoleColor = consoleColor };
    return Task.FromResult<IEventTransmitter>(transmitter);
}
/// <summary>
/// Get a <see cref="Camera"/> that was registered by Morro.
/// </summary>
/// <param name="cameraType">The basic camera you want to get.</param>
/// <returns>The registered camera with the given name.</returns>
public static Camera GetCamera(CameraType cameraType) => GetCamera($"Morro_{cameraType}");
// Creates an IoT Edge transmitter bound to the module output configured for
// the given camera; failures are logged and rethrown to the caller.
public async Task<IEventTransmitter> CreateTransmitter(string segmentId, CameraType cameraId)
{
    try
    {
        string moduleOutput = _configurationReader.GetConfigValue("EDGE_MODULE_OUTPUT", false, cameraId.ToString());
        return new IoTEdgeModuleTransmitter(moduleOutput);
    }
    catch (Exception e)
    {
        _logger.Error(e, $"An error occurred when trying to create iot edge transmitter: {e.Message}");
        throw; // rethrow preserving the original stack trace
    }
}
// Sends a "move stop" command to the codec for the given camera.
public void Stop(CameraType type) => Codec.Send($"camera {type.ToString().ToLower()} move stop");
/// <summary>
/// Validates this camera's configured field values. Returns "1" when every
/// field is valid, otherwise "0" followed by a human-readable error message
/// (the leading character is the success flag consumed by the caller).
/// </summary>
protected override string validateFieldValues()
{
    // Camera IDs are normalised to lower case before validation.
    id = id.ToLower();

    // VLC/RTSP-proxy camera types are only supported on Windows.
    if ((type == CameraType.h264_rtsp_proxy || type == CameraType.vlc_transcode) && Environment.OSVersion.Platform != PlatformID.Win32NT)
    {
        return("0The " + type.ToString() + " camera type is incompatible with the current server environment, which is " + Environment.OSVersion.Platform.ToString());
    }
    if (string.IsNullOrWhiteSpace(name))
    {
        return("0Camera name must not contain only whitespace.");
    }
    if (!Util.IsAlphaNumeric(name, true))
    {
        return("0Camera name must be alphanumeric, but may contain spaces.");
    }
    if (!Util.IsAlphaNumeric(id, false))
    {
        return("0Camera ID must be alphanumeric and not contain any spaces.");
    }
    if (minPermissionLevel < 0 || minPermissionLevel > 100)
    {
        return("0Permission Level must be between 0 and 100.");
    }
    // Idle-timeout minimums depend on the camera type's buffering behaviour.
    if (maxBufferTime < 3 && (type == CameraType.jpg || type == CameraType.mjpg))
    {
        return("0Idle Timeout can't be below 3 for a " + type.ToString() + " camera.");
    }
    if (maxBufferTime < 10 && (type == CameraType.h264_rtsp_proxy || type == CameraType.vlc_transcode))
    {
        return("0Idle Timeout can't be below 10 for a " + type.ToString() + " camera.");
    }
    if (maxBufferTime > 86400)
    {
        return("0Idle Timeout can't be above 86400. A short value (below 30) is recommended.");
    }
    if (delayBetweenImageGrabs < 0)
    {
        return("0The Image Grab Delay can't be less than 0.");
    }
    if (delayBetweenImageGrabs > 600000)
    {
        return("0The Image Grab Delay can't be greater than 600000.");
    }
    if (this.h264_video_width < 0)
    {
        return("0Video Width must be >= 0.");
    }
    if (this.h264_video_height < 0)
    {
        return("0Video Height must be >= 0.");
    }
    // Transcoding-specific settings only apply to vlc_transcode cameras.
    if (type == CameraType.vlc_transcode && this.vlc_transcode_fps <= 0)
    {
        return("0Transcode Frame Rate must be > 0 for a " + type.ToString() + " camera.");
    }
    if (type == CameraType.vlc_transcode && this.vlc_transcode_buffer_time <= 0)
    {
        return("0Buffer Time must be > 0 for a " + type.ToString() + " camera.");
    }
    if (type == CameraType.vlc_transcode && (this.vlc_transcode_image_quality < 0 || this.vlc_transcode_image_quality > 100))
    {
        return("0Image Quality must be between 0 and 100 (inclusive) for a " + type.ToString() + " camera.");
    }
    if (this.ptzType == PtzType.Dev)
    {
        return("0PTZ Type Dev is obsolete and will be removed from a future version. Please choose another PTZ type.");
    }
    // PTZ panorama/idle-reset positions are normalised fractions (0.0 - 1.0).
    if (ptz_panorama_selection_rectangle_width_percent < 0.0 || ptz_panorama_selection_rectangle_width_percent > 1.0)
    {
        return("0PTZ Panorama Selection Rectangle Width must be between 0.0 and 1.0");
    }
    if (ptz_panorama_selection_rectangle_height_percent < 0.0 || ptz_panorama_selection_rectangle_height_percent > 1.0)
    {
        return("0PTZ Panorama Selection Rectangle Height must be between 0.0 and 1.0");
    }
    if (ptz_idleresetpositionX < 0.0 || ptz_idleresetpositionX > 1.0)
    {
        return("0PTZ Idle Pan Position (X) must be between 0.0 and 1.0");
    }
    if (ptz_idleresetpositionY < 0.0 || ptz_idleresetpositionY > 1.0)
    {
        return("0PTZ Idle Tilt Position (Y) must be between 0.0 and 1.0");
    }
    if (ptz_idleresetpositionZ < 0.0 || ptz_idleresetpositionZ > 1.0)
    {
        return("0PTZ Idle Zoom Position (Z) must be between 0.0 and 1.0");
    }
    if (ptz_idleresettimeout < 10 || ptz_idleresettimeout > 604800)
    {
        return("0PTZ Idle Position timeout must be between 10 and 604800");
    }

    // All checks passed.
    return("1");
}
// Stores the given camera's current position under the given preset number.
public void SetPreset(CameraType type, uint presetNumber) =>
    Codec.Send($"preset {type.ToString().ToLower()} set {presetNumber}");
// Selects the given camera number on the codec.
public void Select(CameraType type, int camera) =>
    Codec.Send($"camera {type.ToString().ToLower()} {camera}");