/// <summary>
/// Reads the camera database file and returns the cameras whose CSV fields pass
/// <paramref name="filterMethod"/>. The first line of the file (header) is skipped.
/// </summary>
/// <param name="filterMethod">Predicate over the semicolon-split fields of one line.</param>
/// <returns>The cameras parsed from all valid, matching lines.</returns>
/// <exception cref="Exception">Thrown when the data file cannot be read or parsed;
/// the original failure is preserved as the InnerException.</exception>
private IEnumerable<CameraModel> GetCameras(Func<string[], bool> filterMethod)
{
    try
    {
        using (StreamReader sr = new StreamReader(DB_LOCATION))
        {
            // Skip the header line.
            sr.ReadLine();
            var cameras = new List<CameraModel>();
            string line;
            while ((line = sr.ReadLine()) != null)
            {
                if (!LineIsValid(line))
                {
                    continue;
                }
                var values = line.Split(';');
                if (filterMethod(values))
                {
                    // Fields 1 and 2 are machine-written numbers, so parse them
                    // culture-invariantly (CA1305) instead of with the current culture.
                    cameras.Add(new CameraModel(
                        values[0],
                        float.Parse(values[1], CultureInfo.InvariantCulture),
                        float.Parse(values[2], CultureInfo.InvariantCulture)));
                }
            }
            return cameras;
        }
    }
    catch (Exception e)
    {
        // BUG FIX: the original discarded 'e', destroying the root cause;
        // pass it along as the inner exception.
        throw new Exception("Could not read the data file.", e);
    }
}
/// <summary>
/// Wires up all player-related controllers (movement, shooting, camera, abilities)
/// and initializes them through a single controller list.
/// </summary>
public PlayerController(Data data, InputModel inputModel, PlayerModel playerModel, PauseModel pauseModel)
{
    _controllerList = new ControllerList();

    // Factories used by the camera and shoot controllers.
    var camFactory = new CameraFactory(data.CameraData);
    var beamFactory = new LaserFactory();
    var camModel = new CameraModel(camFactory);

    // Individual gameplay controllers.
    var movement = new MoveController(inputModel.GetInputKeyboard(), data.PlayerData, playerModel.Transform);
    var shooting = new ShootController(data.BulletData, playerModel, beamFactory);
    var cameraCtrl = new CameraController(camModel, playerModel, data.CameraData, pauseModel);
    var explosion = new Explosion(data.ExplosionData, playerModel);
    var ability = new AbilityController(inputModel, explosion);

    // Register everything and run initialization once.
    _controllerList
        .Add(movement)
        .Add(shooting)
        .Add(cameraCtrl)
        .Add(ability)
        .Initialize();
}
/// <summary>
/// Parses the camera setup from a directory containing an "images" folder with an image
/// dataset from the Stanford Light Field Archive, and saves the parsed setup in this directory.
/// </summary>
public void ParseCameraSetup()
{
    // Inform of process start.
    Debug.Log(GeneralToolkit.FormatScriptMessage(this.GetType(), "Started parsing camera setup for an image dataset from the Stanford Light Field Archive located at: " + dataHandler.colorDirectory + "."));
    // Get the files in the "images" folder.
    FileInfo[] fileInfos = GeneralToolkit.GetFilesByExtension(dataHandler.colorDirectory, ".png");
    // Determine the pixel resolution of the images by loading the first one into a
    // temporary texture (all images are assumed to share this resolution).
    Texture2D tempTex = new Texture2D(1, 1);
    GeneralToolkit.LoadTexture(fileInfos[0].FullName, ref tempTex);
    Vector2Int pixelResolution = new Vector2Int(tempTex.width, tempTex.height);
    DestroyImmediate(tempTex);
    // Accumulator for the mean camera position, used if repositioning is selected.
    Vector3 meanPos = Vector3.zero;
    // Reset the camera models to fit the color count.
    _cameraSetup.ResetCameraModels();
    _cameraSetup.cameraModels = new CameraModel[dataHandler.sourceColorCount];
    // Iteratively add each camera model to the setup.
    for (int iter = 0; iter < dataHandler.sourceColorCount; iter++)
    {
        CameraModel cameraModel = _cameraSetup.AddCameraModel(iter);
        // Store the image's pixel resolution in the camera model.
        cameraModel.pixelResolution = pixelResolution;
        // Store the image's name in the camera model.
        FileInfo fileInfo = fileInfos[iter];
        cameraModel.SetCameraReferenceIndexAndImageName(cameraModel.cameraReferenceIndex, fileInfo.Name);
        // Store the image's position in the model.
        // NOTE(review): assumes the file name's underscore-separated segments carry the
        // grid position, with Y at index length-3 and X at length-2 (Y negated to match
        // the engine's axis direction) — confirm against the dataset's naming scheme.
        string[] split = fileInfo.Name.Split('_');
        float positionY = -GeneralToolkit.ParseFloat(split[split.Length - 3]);
        float positionX = GeneralToolkit.ParseFloat(split[split.Length - 2]);
        Vector3 pos = scaleFactor * new Vector3(positionX, positionY, 0);
        cameraModel.transform.position = pos;
        meanPos += pos;
    }
    // If it is selected, reposition the camera setup around its center position.
    if (repositionAroundCenter)
    {
        meanPos /= dataHandler.sourceColorCount;
        for (int iter = 0; iter < dataHandler.sourceColorCount; iter++)
        {
            CameraModel cameraModel = _cameraSetup.cameraModels[iter];
            cameraModel.transform.position = cameraModel.transform.position - meanPos;
        }
    }
    // Temporarily move the color images to a safe location.
    string tempDirectoryPath = Path.Combine(GeneralToolkit.GetDirectoryBefore(dataHandler.dataDirectory), "temp");
    GeneralToolkit.Move(PathType.Directory, dataHandler.colorDirectory, tempDirectoryPath);
    // Save the camera setup information (this would also have cleared the "images" folder if it was still there).
    Acquisition.Acquisition.SaveAcquisitionInformation(dataHandler, cameraSetup);
    // Move the color images back into their original location.
    GeneralToolkit.Delete(dataHandler.colorDirectory);
    GeneralToolkit.Move(PathType.Directory, tempDirectoryPath, dataHandler.colorDirectory);
    // Update the camera models of the setup object.
    _cameraSetup.FindCameraModels();
    // Inform of end of process.
    Debug.Log(GeneralToolkit.FormatScriptMessage(this.GetType(), "Finished parsing camera setup. Result can be previewed in the Scene view."));
}
/// <summary>
/// Updates an existing camera row in the ConfigCamera table and refreshes the
/// cached camera settings afterwards.
/// </summary>
/// <param name="model">The camera whose CameraId identifies the row to update.</param>
public void UpdateCamera(CameraModel model)
{
    // BUG FIX: the original wrapped this in 'catch (Exception ex) { throw ex; }',
    // which only destroyed the stack trace without adding information. Letting the
    // exception propagate unchanged preserves the root cause for callers.
    using (SqliteConnection db = new SqliteConnection(Constants.PathDatabase))
    {
        db.Open();
        using (SqliteCommand sqlCommand = new SqliteCommand())
        {
            sqlCommand.Connection = db;
            // Parameterized update — values are never concatenated into the SQL text.
            sqlCommand.CommandText = "UPDATE ConfigCamera SET CameraIP=@CameraIP, CameraType=@CameraType, CameraUser=@CameraUser, CameraPass=@CameraPass, StreaURI=@StreaURI, IndexView=@IndexView, BoxWidth=@BoxWidth, BoxHeight=@BoxHeight, BoxPointX=@BoxPointX, BoxPointY=@BoxPointY where CameraId=@CameraId;";
            sqlCommand.Parameters.AddWithValue("@CameraIP", model.CameraIP);
            sqlCommand.Parameters.AddWithValue("@CameraType", model.CameraType);
            sqlCommand.Parameters.AddWithValue("@CameraUser", model.CameraUser);
            sqlCommand.Parameters.AddWithValue("@CameraPass", model.CameraPass);
            sqlCommand.Parameters.AddWithValue("@StreaURI", model.StreaURI);
            sqlCommand.Parameters.AddWithValue("@IndexView", model.IndexView);
            sqlCommand.Parameters.AddWithValue("@BoxWidth", model.BoxWidth);
            sqlCommand.Parameters.AddWithValue("@BoxHeight", model.BoxHeight);
            sqlCommand.Parameters.AddWithValue("@BoxPointX", model.BoxPointX);
            sqlCommand.Parameters.AddWithValue("@BoxPointY", model.BoxPointY);
            sqlCommand.Parameters.AddWithValue("@CameraId", model.CameraId);
            sqlCommand.ExecuteNonQuery();
        }
    }

    // Refresh the in-memory settings cache from the database.
    InfoSettingFix.InfoSetting = this.GetCameraAPI();
}
/// <summary>
/// Creates a new camera model and returns 201 Created with a Location header
/// pointing at the matching GetCameraModel endpoint.
/// </summary>
public async Task<ActionResult<CameraModel>> PostCameraModel(CameraModel cameraModel)
{
    // Persist the new entity; EF assigns its Id during SaveChanges.
    _context.Models.Add(cameraModel);
    await _context.SaveChangesAsync();

    var routeValues = new { id = cameraModel.Id };
    return CreatedAtAction("GetCameraModel", routeValues, cameraModel);
}
/// <summary>
/// Assigns the identity (ID and display name) to the given camera model and
/// creates its underlying capture instance.
/// </summary>
public void SetCameraID(CameraModel cameraModel, int ID, string name)
{
    // Identity first, then spin up the actual camera device.
    cameraModel.Name = name;
    cameraModel.ID = ID;
    cameraModel.CreateCameraInstance();
}
/// <summary>
/// Opens the camera's native property page (exposed by AForge through the
/// video source) — only when a camera is currently activated.
/// </summary>
public void ShowCameraProperties(CameraModel camera)
{
    // Nothing to show unless a camera is running.
    if (!this.IsCameraActivated)
    {
        return;
    }

    camera.VideoSource.DisplayPropertyPage(IntPtr.Zero);
}
/// <summary>
/// Stores the camera configuration, positions the camera at the configured
/// height/radius and aims it at a point raised above the origin.
/// </summary>
public void Init(CameraModel config)
{
    _config = config;

    var camTransform = _camera.transform;
    camTransform.localPosition = new Vector3(0f, _config.height, _config.roundRadius);
    // Look at a point lifted straight up from the origin by the configured height.
    camTransform.LookAt(Vector3.up * _config.lookAtHeight);
}
/// <summary>
/// Binds the displayable model to this view, narrowing it to a CameraModel and
/// wiring up the child line renderers and camera component.
/// </summary>
public override void BindEntityModel(DisplayableEntityModel model)
{
    base.BindEntityModel(model);

    // BUG FIX: the previous 'as' cast produced a NullReferenceException on the very
    // next line when the model was not a CameraModel; a direct cast fails fast with
    // a descriptive InvalidCastException instead.
    this.model = (CameraModel)base.model;
    this.model.lineRenders = GetComponentsInChildren<LineRenderer>().ToList();
    this.model.camera = GetComponentInChildren<Camera>();
}
/// <summary>
/// Writes the parameters from the given camera models into a text file.
/// The format is that of COLMAP's "cameras.txt" file, and can be read directly.
/// </summary>
/// <param name="cameraModels">The camera models to be written to file.</param>
/// <param name="workspace">The workspace from which to work.</param>
public static void SaveCamerasInformation(CameraModel[] cameraModels, string workspace)
{
    var sb = new StringBuilder();

    // COLMAP-style header.
    sb.AppendLine("# Camera list with one line of data per camera:");
    sb.AppendLine("# CAMERA_ID, MODEL, WIDTH, HEIGHT, PARAMS[]");
    sb.AppendLine("# Number of cameras: " + GeneralToolkit.ToString(cameraModels.Length));

    // One space-separated line per camera: id, model name, resolution,
    // focal length, and principal point at the image center.
    foreach (CameraModel cam in cameraModels)
    {
        string id = GeneralToolkit.ToString(cam.cameraReferenceIndex);
        string width = GeneralToolkit.ToString(cam.pixelResolution.x);
        string height = GeneralToolkit.ToString(cam.pixelResolution.y);
        string focal = GeneralToolkit.ToString(Camera.FieldOfViewToFocalLength(cam.fieldOfView.x, cam.pixelResolution.x));
        string centerX = GeneralToolkit.ToString(cam.pixelResolution.x / 2);
        string centerY = GeneralToolkit.ToString(cam.pixelResolution.y / 2);
        sb.AppendLine(id + " " + cam.modelName + " " + width + " " + height + " " + focal + " " + centerX + " " + centerY);
    }

    // Write the header and parameters into a .txt file.
    File.WriteAllText(GetCamerasFile(workspace), sb.ToString());
}
/// <summary>
/// Creates mesh assets for the depth image specified by the given index.
/// </summary>
/// <param name="acquisitionIndex">The index of the depth image.</param>
private void ProcessDepthImage(int acquisitionIndex)
{
    // Check if the asset has already been processed; if so, skip all work.
    string bundledAssetName = dataHandler.GetBundledAssetName(this, perViewMeshAssetPrefix + GeneralToolkit.ToString(acquisitionIndex));
    string meshRelativePath = Path.Combine(GeneralToolkit.tempDirectoryRelativePath, bundledAssetName + ".asset");
    if (dataHandler.IsAssetAlreadyProcessed(meshRelativePath))
    {
        return;
    }
    // Update the camera model to the one matching this acquisition index.
    cameraModel = cameraSetup.cameraModels[acquisitionIndex];
    // Initialize the distance map texture, and load the depth data into it.
    InitializeDistanceMap();
    string imageName = cameraModel.imageName;
    string imagePath = Path.Combine(dataHandler.depthDirectory, imageName);
    GeneralToolkit.LoadTexture(imagePath, ref distanceMap);
    // Compute a mesh from the distance map.
    Mesh outMesh;
    ComputeMesh(out outMesh);
    // Save this mesh as an asset and refresh the asset database so it can be loaded back.
    AssetDatabase.CreateAsset(outMesh, meshRelativePath);
    AssetDatabase.Refresh();
    // Store an instantiated copy of the per-view mesh into the final array
    // (instantiating decouples the runtime mesh from the saved asset).
    Mesh meshAsset = AssetDatabase.LoadAssetAtPath<Mesh>(meshRelativePath);
    perViewMeshes[acquisitionIndex] = (Mesh)Instantiate(meshAsset);
}
/// <summary>
/// Updates the preview camera with the camera model, and displays the rendered view in the preview window.
/// </summary>
/// <param name="useFullResolution">Whether to use the full resolution (capture) or one limited for preview (preview window).</param>
public void UpdatePreviewCameraModel(bool useFullResolution)
{
    // The preview camera manager, and its camera, need to have been initialized in a previous step.
    if (_previewCameraManager != null && _previewCameraManager.previewCamera != null)
    {
        // Update the preview camera's camera model, and render the preview image.
        CameraModel cameraParams = cameraSetup.cameraModels[cameraSetup.previewIndex];
        _previewCameraManager.UpdateCameraModel(cameraParams, useFullResolution);
        _previewCameraManager.RenderPreviewToTarget(ref _previewCameraManager.targetTexture, false);
        int previewMaxIndex = cameraSetup.cameraModels.Length - 1;
        PreviewWindow.DisplayImage(_colorCallerName, _previewCameraManager.targetTexture, previewMaxIndex);
        // If depth data, or mesh data, is to be acquired, display a depth preview.
        if (_acquireDepthData || _copyGlobalMesh)
        {
            // Render actual depth into a precise (single-channel float) depth texture.
            GeneralToolkit.CreateRenderTexture(ref _targetDepthTexture, cameraParams.pixelResolution, 24, RenderTextureFormat.RFloat, true, FilterMode.Point, TextureWrapMode.Clamp);
            _previewCameraManager.RenderPreviewToTarget(ref _targetDepthTexture, true);
            // Encode the depth texture into a color texture, using a colormap suited for visualization.
            // The conversion material is created lazily on first use.
            if (_distanceToColorMat == null)
            {
                _distanceToColorMat = new Material(GeneralToolkit.shaderAcquisitionConvert01ToColor);
            }
            _distanceToColorMat.SetInt(shaderNameIsPrecise, 0);
            GeneralToolkit.CreateRenderTexture(ref _distanceAsColorTexture, cameraParams.pixelResolution, 0, RenderTextureFormat.ARGB32, true, FilterMode.Point, TextureWrapMode.Clamp);
            Graphics.Blit(_targetDepthTexture, _distanceAsColorTexture, _distanceToColorMat);
            // Display the texture in the preview window.
            PreviewWindow.DisplayImage(_depthCallerName, _distanceAsColorTexture, previewMaxIndex);
            // Reset the active render texture so later rendering is not redirected.
            RenderTexture.active = null;
        }
    }
}
/// <summary>
/// Fetches the global mesh and initializes the camera and materials.
/// </summary>
private void InitializePerCall()
{
    // Deactivate any other renderer in the scene so they do not pollute the render.
    _deactivatedRendererGOs = GeneralToolkit.DeactivateOtherActiveComponents<Renderer>(gameObject);
    // Create a preview camera manager and initialize it with the camera model's pose and parameters.
    // The model is placed at the origin with identity rotation and a 60-degree field of view.
    _previewCameraModel = CameraModel.CreateCameraModel();
    _previewCameraModel.transform.position = Vector3.zero;
    _previewCameraModel.transform.rotation = Quaternion.identity;
    _previewCameraModel.fieldOfView = 60f * Vector2.one;
    float focalLength = Camera.FieldOfViewToFocalLength(_previewCameraModel.fieldOfView.x, 1f);
    _previewCameraManager = new GameObject("Preview Camera Manager").AddComponent<PreviewCameraManager>();
    Transform previewCameraTransform = new GameObject("Preview Camera").transform;
    GeneralToolkit.CreateRenderTexture(ref _previewCameraManager.targetTexture, Vector2Int.one, 0, RenderTextureFormat.ARGB32, false, FilterMode.Point, TextureWrapMode.Clamp);
    _previewCameraManager.CreatePreviewCamera(_previewCameraManager.gameObject, previewCameraTransform, _previewCameraModel);
    // Clear to transparent so unrendered pixels carry zero alpha.
    _previewCameraManager.previewCamera.clearFlags = CameraClearFlags.Color;
    _previewCameraManager.previewCamera.backgroundColor = Color.clear;
    // Create the materials used for texture-map rendering and alpha normalization.
    _renderToTextureMapMat = new Material(GeneralToolkit.shaderProcessingGlobalTextureMap);
    _renderToTextureMapMat.SetFloat(_shaderNameFocalLength, focalLength);
    _normalizeByAlphaMat = new Material(GeneralToolkit.shaderNormalizeByAlpha);
    // Initialize the helper object for ULR (unstructured lumigraph rendering);
    // the call order below (reset, link, configure, create buffers, bind material)
    // is required by the helper's setup sequence.
    _helperULR = gameObject.AddComponent<Rendering.Helper_ULR>();
    _helperULR.Reset();
    _helperULR.InitializeLinks();
    _helperULR.blendCamCount = Rendering.Helper_ULR.maxBlendCamCount;
    _helperULR.numberOfSourceCameras = PMColorTextureArray.colorData.depth;
    _helperULR.CreateULRBuffersAndArrays();
    _helperULR.InitializeBlendingMaterialParameters(ref _renderToTextureMapMat);
    _helperULR.currentBlendingMaterial = _renderToTextureMapMat;
    _helperULR.initialized = true;
}
/// <summary>
/// Captures the references needed when the command executes and subscribes to
/// settings changes that may affect whether the command can run.
/// </summary>
public ResetCameraCommand(Models models, CameraModel camera)
{
    m_camera = camera;
    m_models = models;

    // React to relevant settings changes.
    models.Settings.PropertyChanged += SettingsPropertyChanged;
}
// Downloads the camera's counting feed, parses each multipart segment into a
// CountingModel (with per-line A/B detail counts), and persists the batch.
// NOTE(review): parsing relies on fixed positions — segment line 3 is a CSV header
// whose field 5 holds a GMT offset and fields 0-1 a date/time, and detail lines
// (from line 4 on) hold the A/B values at CSV fields 4 and 5 — confirm against the
// device's actual feed format.
private async Task GetCountingAsync(ConfigModel config, CameraModel camera)
{
    // Split the downloaded buffer into one chunk per multipart boundary.
    string[] texts = (await GetFile(config, camera, config.CountingUri, config.CountingBufferFileName))
        .Split(new[] { "--myboundary" }, StringSplitOptions.RemoveEmptyEntries);
    var countings = texts.Select(x => x.Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries))
        // Chunks with a single line carry no data.
        .Where(x => x.Length > 1)
        .Select(x => new CountingModel
        {
            CameraId = camera.Id,
            Gmt = ConvertToTime(x[3].Split(',')[5]),
            DateTime = ConvertToDateTime(string.Join(" ", x[3].Split(',').Take(2))),
            //RawData = string.Join(Environment.NewLine, x.Skip(4)),
            // Raw payload intentionally not stored (see commented line above).
            RawData = "Leave it empty",
            CountingDetails = x.Skip(4).Select(a =>
            {
                var items = a.Split(',').Select(long.Parse).ToArray();
                return(new CountingDetailModel { A = items[4], B = items[5] });
            }).ToList()
        }).ToArray();
    await _countingRepository.InsertAsync(countings);
}
/// <summary>
/// Builds an HTTP Basic authentication header from the camera's credentials
/// (RFC 7617: base64 of "username:password").
/// </summary>
private static AuthenticationHeaderValue GetAuthenticationHeader(CameraModel camera)
{
    byte[] credentialBytes = Encoding.ASCII.GetBytes(camera.Username + ":" + camera.Password);
    return new AuthenticationHeaderValue("Basic", Convert.ToBase64String(credentialBytes));
}
/// <summary>
/// Inserts a new camera row into the ConfigCamera table and refreshes the
/// cached camera settings afterwards.
/// </summary>
/// <param name="model">The camera to insert; its id is generated by the database.</param>
public void AddCamera(CameraModel model)
{
    // BUG FIX: the original wrapped this in 'catch (Exception ex) { throw ex; }',
    // which only destroyed the stack trace without adding information. Letting the
    // exception propagate unchanged preserves the root cause for callers.
    using (SqliteConnection db = new SqliteConnection(Constants.PathDatabase))
    {
        db.Open();
        using (SqliteCommand sqlCommand = new SqliteCommand())
        {
            sqlCommand.Connection = db;
            // Parameterized insert — NULL lets SQLite assign the rowid/CameraId.
            sqlCommand.CommandText = "INSERT INTO ConfigCamera VALUES (NULL, @CameraIP, @CameraType, @CameraUser, @CameraPass, @StreaURI, @IndexView, @BoxWidth, @BoxHeight, @BoxPointX, @BoxPointY);";
            sqlCommand.Parameters.AddWithValue("@CameraIP", model.CameraIP);
            sqlCommand.Parameters.AddWithValue("@CameraType", model.CameraType);
            sqlCommand.Parameters.AddWithValue("@CameraUser", model.CameraUser);
            sqlCommand.Parameters.AddWithValue("@CameraPass", model.CameraPass);
            sqlCommand.Parameters.AddWithValue("@StreaURI", model.StreaURI);
            sqlCommand.Parameters.AddWithValue("@IndexView", Int32.Parse(model.IndexView));
            sqlCommand.Parameters.AddWithValue("@BoxWidth", (int)model.BoxWidth);
            sqlCommand.Parameters.AddWithValue("@BoxHeight", (int)model.BoxHeight);
            sqlCommand.Parameters.AddWithValue("@BoxPointX", (int)model.BoxPointX);
            sqlCommand.Parameters.AddWithValue("@BoxPointY", (int)model.BoxPointY);
            sqlCommand.ExecuteNonQuery();
        }
    }

    // Refresh the in-memory settings cache from the database.
    InfoSettingFix.InfoSetting = this.GetCameraAPI();
}
} // SaveCameras

/// <summary>
/// Loads a camera's saved settings (name, FPS, annotation flag) from its
/// per-GUID JSON file; falls back to a default name when no file exists.
/// </summary>
/// <param name="camera">The camera to populate in place.</param>
public void LoadCamera(CameraModel camera)
{
    string path = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "cfg", camera.GUID + ".cam.json");
    if (!File.Exists(path))
    {
        camera.Name = "Camera " + camera.TrackerId;
        return;
    }

    // BUG FIX: the original read and parsed the file inside a 'finally' block and
    // only closed the reader on the happy path, leaking it when ReadToEnd or the
    // JSON deserialization threw. File.ReadAllText handles open/read/close safely.
    string fileContents = File.ReadAllText(path);
    CameraModel tmp = JsonConvert.DeserializeObject<CameraModel>(fileContents);
    camera.Name = tmp.Name;
    camera.FPS = tmp.FPS;
    camera.Annotate = tmp.Annotate;
} // LoadCameras
/// <summary>
/// Encodes "username:password" as base64 and wraps it in an HTTP Basic
/// authorization header value.
/// </summary>
private static AuthenticationHeaderValue GetAuthenticationHeader(CameraModel camera)
{
    string credentials = string.Concat(camera.Username, ":", camera.Password);
    string encoded = Convert.ToBase64String(Encoding.ASCII.GetBytes(credentials));
    return new AuthenticationHeaderValue("Basic", encoded);
}
/// <summary>
/// Updates an existing camera model. Returns 400 when the route id and payload id
/// disagree, 404 when the entity was deleted concurrently, and 204 on success.
/// </summary>
public async Task<IActionResult> PutCameraModel(int id, CameraModel cameraModel)
{
    // The route id and the payload id must agree.
    if (id != cameraModel.Id)
    {
        return BadRequest();
    }

    _context.Entry(cameraModel).State = EntityState.Modified;

    try
    {
        await _context.SaveChangesAsync();
    }
    catch (DbUpdateConcurrencyException)
    {
        // Concurrent delete maps to 404; any other conflict is rethrown.
        if (!CameraModelExists(id))
        {
            return NotFound();
        }

        throw;
    }

    return NoContent();
}
// A single shared HttpClient: creating one per call (as the original did) exhausts
// sockets under load. Consider IHttpClientFactory if one is available in this app.
private static readonly HttpClient s_imageHttpClient = new HttpClient();

/// <summary>
/// Opens a response stream from the camera's image URL using HTTP Basic authentication.
/// </summary>
/// <param name="camera">Camera supplying the URL and credentials.</param>
/// <returns>The response body stream (headers already received).</returns>
public static async Task<Stream> GetImageStreamAsync(CameraModel camera)
{
    // Attach credentials per request so the shared client is never contaminated
    // with another camera's Authorization header.
    var request = new HttpRequestMessage(HttpMethod.Get, camera.Url);
    request.Headers.Authorization = GetAuthenticationHeader(camera);

    var response = await s_imageHttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
    // Match GetStreamAsync's behavior of throwing on non-success status codes.
    response.EnsureSuccessStatusCode();
    return await response.Content.ReadAsStreamAsync();
}
// Downloads the camera's heat-map feed, aggregates each multipart segment's
// positive cell values into totals/counts/density, and persists the batch.
// NOTE(review): like GetCountingAsync, parsing relies on fixed positions — segment
// line 3 carries the timestamp CSV (GMT at field 5, date/time in fields 0-1) and
// lines 4+ carry the cell values — confirm against the device's feed format.
private async Task GetHeatMap(ConfigModel config, CameraModel camera)
{
    // Split the downloaded buffer into one chunk per multipart boundary.
    string[] texts = (await GetFile(config, camera, config.HeatMapUri, config.HeatMapBufferFileName))
        .Split(new[] { "--myboundary" }, StringSplitOptions.RemoveEmptyEntries);
    var heatmaps = texts.Select(x => x
        .Split(new[] { Environment.NewLine }, StringSplitOptions.RemoveEmptyEntries))
        // Chunks with a single line carry no data.
        .Where(x => x.Length > 1)
        // First projection: extract timestamp and the positive cell values.
        .Select(x => new
        {
            Gmt = ConvertToTime(x[3].Split(',')[5]),
            DateTime = ConvertToDateTime(string.Join(" ", x[3].Split(',').Take(2))),
            Raw = string.Join(Environment.NewLine, x.Skip(4)),
            Value = string.Join(Environment.NewLine, x.Skip(4))
                .Split(new[] { Environment.NewLine, "," }, StringSplitOptions.RemoveEmptyEntries)
                .Select(long.Parse)
                .Where(a => a > 0).ToArray()
        })
        // Second projection: fold the values into the persisted aggregate model.
        .Select(x => new HeatMapModel
        {
            CameraId = camera.Id,
            Gmt = x.Gmt,
            DateTime = x.DateTime,
            //RawData = x.Raw,
            // Raw payload intentionally not stored (see commented line above).
            RawData = "Leave it empty",
            TotalValue = x.Value.Sum(),
            TotalCount = x.Value.Length,
            // Integer division: average value per non-zero cell, truncated.
            Density = x.Value.Length > 0 ? (x.Value.Sum() / x.Value.Length) : 0,
        }).ToArray();
    await _heatMapRepository.InsertAsync(heatmaps);
}
/// <summary>
/// Sets up the third-person camera: stores its dependencies, computes the initial
/// camera-to-player offset, and places the main camera at its starting position.
/// </summary>
/// <param name="cameraModel">Camera model (distance settings).</param>
/// <param name="player">The player's transform.</param>
/// <param name="camera">Reference to the MainCamera.</param>
/// <param name="inputController">Reference to the input controller.</param>
public CameraController(CameraModel cameraModel, Transform player, UnityCamera camera, InputController inputController)
{
    _aimPosition = GameObject.FindGameObjectWithTag("AimPosition").transform;
    // Store the camera model.
    _cameraModel = cameraModel;
    _player = player;
    this.Camera = camera;
    // Store the reference to the input controller.
    _inputController = inputController;
    // Compute the camera's starting position behind the player.
    // NOTE(review): '/' binds tighter than '+', so this is Min + (Max / 2), not the
    // midpoint (Min + Max) / 2 — confirm whether the midpoint was intended.
    var transform = player.transform;
    var position = transform.position;
    var startCameraDistance = position + -Vector3.forward * (cameraModel.CameraMinDistance + cameraModel.CameraMaxDistance / 2);
    // Set the initial distance (offset) between the camera and the player.
    _offset = position - startCameraDistance;
    // Place the camera at its starting position, rotated with the player.
    camera.transform.position = position - transform.rotation * _offset;
}
/// <summary>
/// Projects a camera model into its info DTO; only the name list is carried over.
/// </summary>
private static CameraModelInfo GetCameraModelInfo(CameraModel model)
{
    var info = new CameraModelInfo();
    info.Names = model.Names;
    return info;
}
// Tears down this controller when its model is destroyed: runs the base teardown,
// releases the section tracker, then drops remaining references so they can be
// collected. The controller is unusable afterwards.
protected override void OnModelDestroy()
{
    base.OnModelDestroy();
    // Clean must run before the tracker reference is dropped.
    _sectionTracker.Clean();
    _sectionTracker = null;
    _currentWaveInfo = null;
    _gameCamera = null;
}
// Resolves the shared models this controller depends on from the model registry,
// then signals that loading is complete.
protected override void OnLoadStart()
{
    settingModel = Model.First<SettingModel>();
    playersModel = Model.First<PlayersModel>();
    inGameplayModel = Model.First<InGamePlayModel>();
    cameraModel = Model.First<CameraModel>();
    // Loading is synchronous: mark complete immediately.
    SetLoadComplete();
}
/// <summary>
/// Builds a concrete camera model for the given producer and make, returned
/// through the ICameraModel interface.
/// </summary>
public ICameraModel GetCameraModel(string producer, string make)
{
    return new CameraModel
    {
        Producer = producer,
        Make = make,
    };
}
/// <summary>
/// Wraps a single camera model (plus its derived info) in the plural
/// CameraModels container.
/// </summary>
private CameraModels GetCameraModels(CameraModel model, string modelId, Camera camera, SoftwareCameraInfo cameraInfo)
{
    var result = new CameraModels();
    result.Info = GetCameraInfo(model, cameraInfo, modelId);
    result.Models = new[] { GetCameraModelInfo(model) };
    return result;
}
/// <summary>
/// Parses camera parameters for a camera of the OMNIDIRECTIONAL type.
/// Note that this type is not actually handled by COLMAP.
/// </summary>
/// <param name="split">The split string from which to parse information.</param>
/// <returns>A camera model containing the parsed parameters.</returns>
private static CameraModel ParseOmnidirectional(string[] split)
{
    // Omnidirectional cameras only need the shared basic parameters.
    return BasicParse(split, true);
}
/// <summary>
/// Creates a camera model for the given producer and make and returns it as
/// an ICameraModel.
/// </summary>
public ICameraModel GetCameraModel(string producer, string make)
{
    ICameraModel result = new CameraModel
    {
        Producer = producer,
        Make = make,
    };
    return result;
}
/// <summary>
/// Serializes the main camera's parameters into the camera file template and
/// writes the result to the scene folder on the desktop.
/// </summary>
/// <param name="mainCamera">The Unity camera whose transform and optics are exported.</param>
static void CreateCameraModelFile(Camera mainCamera)
{
    // BUG FIX: 'new Guid()' is the all-zero GUID, so every exported camera shared
    // the same identifier. Guid.NewGuid() generates a unique one.
    CameraModel model = new CameraModel(
        Guid.NewGuid().ToString(),
        CAMERA_TYPE,
        "Camera",
        GetMatrixFromTransform(mainCamera.transform),
        mainCamera.fieldOfView,
        CAMERA_ZOOM,
        mainCamera.nearClipPlane,
        mainCamera.farClipPlane,
        CAMERA_FOCUS,
        1.7f,
        CAMERA_FILM_GAUGE);

    // Splice the serialized camera into the file template at its bookmark.
    var fullJson = cameraFile.Replace(CAMERA_OBJECT_BOOKMARK, JsonUtility.ToJson(model));
    CreateFile(fullJson, Path.Combine(desktopPath, ROOT_FOLDER_NAME, SCENE_FOLDER_NAME, CAMERA_FILE_NAME));
}
// A single shared HttpClient: creating one per call (as the original did) exhausts
// sockets under load. Consider IHttpClientFactory if one is available in this app.
private static readonly HttpClient s_sharedHttpClient = new HttpClient();

/// <summary>
/// Opens a response stream from the camera's image URL using HTTP Basic authentication.
/// </summary>
/// <param name="camera">Camera supplying the URL and credentials.</param>
/// <returns>The response body stream (headers already received).</returns>
public static async Task<Stream> GetImageStreamAsync(CameraModel camera)
{
    // Attach credentials per request so the shared client is never contaminated
    // with another camera's Authorization header.
    var request = new HttpRequestMessage(HttpMethod.Get, camera.Url);
    request.Headers.Authorization = GetAuthenticationHeader(camera);

    var response = await s_sharedHttpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
    // Match GetStreamAsync's behavior of throwing on non-success status codes.
    response.EnsureSuccessStatusCode();
    return await response.Content.ReadAsStreamAsync();
}
/// <summary>
/// Builds the root model: creates all sub-models and puts the session into its
/// initial state (player selection, no players, no level progress).
/// </summary>
private MainModel()
{
    // Sub-models.
    CameraModel = new CameraModel();
    PlayerDataModel = new PlayerDataModel();
    ChoicesModel = new ChoicesModel();
    SubjectModel = new SubjectModel();

    // Initial session state.
    AvailablePlayers = new List<PlayerModel>();
    ScreenState = ScreenState.PlayerSelection;
    ActiveLevelProgress = 0;
}
/// <summary>
/// Initializes the Controller: builds the camera model from this component's
/// transform and the two token view transforms, then runs the base initialization.
/// </summary>
public override void Initialize()
{
    var ownTransform = GetComponent<Transform>();
    m_cameraModel = new CameraModel(ownTransform, PlayerTokenViewTransform, PcTokenViewTransform);
    base.Initialize();
}