void Update()
{
    if (applicationExiting)
        return;

    if (cameraTexture == null || predictionTexture == null || carController == null)
        return;

    ogmaneo.Vec2i pixelPos = new Vec2i();

    // Remember currently active render texture
    RenderTexture currentActiveRT = RenderTexture.active;

    // Transfer the camera capture into the prediction texture (temporarily)
    RenderTexture.active = cameraTexture;
    predictionTexture.ReadPixels(new Rect(0, 0, _inputWidth, _inputHeight), 0, 0);
    predictionTexture.Apply();

    // Restore active render texture
    RenderTexture.active = currentActiveRT;

    // Convert from RGB space to Y'UV (keeping only the luma channel)
    Color actualPixel = new Color();
    Color yuvPixel = new Color(0.0f, 0.0f, 0.0f);

    for (int x = 0; x < _inputWidth; x++)
    {
        for (int y = 0; y < _inputHeight; y++)
        {
            actualPixel = predictionTexture.GetPixel(x, y);

            // SDTV (BT.601) Y'UV conversion
            yuvPixel.r = actualPixel.r * 0.299f + actualPixel.g * 0.587f + actualPixel.b * 0.114f; // Y' luma component

            // Chrominance
            // U = r * -0.14713 + g * -0.28886 + b * 0.436
            //yuvPixel.g = 0.0f;
            // V = r * 0.615 + g * -0.51499 + b * -0.10001
            //yuvPixel.b = 0.0f;

            predictionTexture.SetPixel(x, y, yuvPixel);
        }
    }

    // Edge detection convolution methods:
    // - Laplacian of the Gaussian (LoG) - https://en.wikipedia.org/wiki/Blob_detection#The_Laplacian_of_Gaussian
    // - Sobel-Feldman and Scharr operators - https://en.wikipedia.org/wiki/Sobel_operator
    // - Prewitt operator - https://en.wikipedia.org/wiki/Prewitt_operator
    // - Kirsch operator - https://en.wikipedia.org/wiki/Kirsch_operator
    Texture2D horzTexture = ConvolutionFilter.Apply(predictionTexture, ConvolutionFilter.Sobel3x3Horizontal); // or ConvolutionFilter.Prewitt3x3Horizontal
    Texture2D vertTexture = ConvolutionFilter.Apply(predictionTexture, ConvolutionFilter.Sobel3x3Vertical);   // or ConvolutionFilter.Prewitt3x3Vertical

    Texture2D convolvedTexture = new Texture2D(_inputWidth, _inputHeight, predictionTexture.format, false);

    Color tempPixel = new Color(0.0f, 0.0f, 0.0f);

    for (int x = 0; x < _inputWidth; x++)
    {
        for (int y = 0; y < _inputHeight; y++)
        {
            Color horzPixel = horzTexture.GetPixel(x, y);
            Color vertPixel = vertTexture.GetPixel(x, y);

            // Gradient magnitude, replicated across the RGB channels
            tempPixel.r = Mathf.Sqrt((horzPixel.r * horzPixel.r) + (vertPixel.r * vertPixel.r));
            tempPixel.g = tempPixel.r; // Mathf.Sqrt((horzPixel.g * horzPixel.g) + (vertPixel.g * vertPixel.g));
            tempPixel.b = tempPixel.r; // Mathf.Sqrt((horzPixel.b * horzPixel.b) + (vertPixel.b * vertPixel.b));

            convolvedTexture.SetPixel(x, y, tempPixel);
        }
    }

    predictionTexture.SetPixels(convolvedTexture.GetPixels());
    predictionTexture.Apply();

    // Transfer the filtered camera texture into the ValueField2D input field
    for (int x = 0; x < _inputWidth; x++)
    {
        for (int y = 0; y < _inputHeight; y++)
        {
            actualPixel = predictionTexture.GetPixel(x, y);

            pixelPos.x = x;
            pixelPos.y = y;

            _inputField.setValue(pixelPos, actualPixel.r);

            previousImage[x, y] = sourceImage[x, y];
            sourceImage[x, y] = actualPixel.r; // * 0.299f + actualPixel.g * 0.587f + actualPixel.b * 0.114f;
        }
    }

    // Encode scalar values from the car controller
    Steer = carController.CurrentSteerAngle / carController.m_MaximumSteerAngle;
    Accel = carController.AccelInput;
    Brake = carController.BrakeInput;

    pixelPos.x = 0;
    pixelPos.y = 0;
    _inputValues.setValue(pixelPos, Steer);

    // Setup the hierarchy input vector
    vectorvf inputVector = new vectorvf();
    inputVector.Add(_inputField);
    inputVector.Add(_inputValues);

    // Step the hierarchy
    _hierarchy.activate(inputVector);

    if (Training)
        _hierarchy.learn(inputVector);

    // Grab the predictions vector
    vectorvf prediction = _hierarchy.getPredictions();

    // Transfer the ValueField2D fields into the RGB prediction texture
    Color predictedPixel = new Color();

    for (int x = 0; x < _inputWidth; x++)
    {
        for (int y = 0; y < _inputHeight; y++)
        {
            pixelPos.x = x;
            pixelPos.y = y;

            predictedPixel.r = prediction[0].getValue(pixelPos);
            predictedPixel.g = predictedPixel.r; // prediction[1].getValue(pixelPos);
            predictedPixel.b = predictedPixel.r; // prediction[2].getValue(pixelPos);

            predictionTexture.SetPixel(x, y, predictedPixel);

            predictedImage[x, y] = predictedPixel.r; // * 0.299f + predictedPixel.g * 0.587f + predictedPixel.b * 0.114f;
        }
    }

    predictionTexture.Apply();

    // Wait for physics to settle
    if (_time < 1.0f)
    {
        _time += Time.deltaTime;

        // Apply hand brake
        carSteer = 0.0f;
        carAccel = 0.0f;
        carBrake = -1.0f;
        HandBrake = 1.0f;
    }
    else
    {
        // Release hand brake
        HandBrake = 0.0f;

        Accel = -1.0f;
        Brake = Accel;

        pixelPos.x = 0;
        pixelPos.y = 0;

        // Update the car controller
        PredictedSteer = prediction[1].getValue(pixelPos);
        PredictedAccel = Accel;
        PredictedBrake = Brake;

        carSteer = PredictedSteer; // * carController.m_MaximumSteerAngle;
        carAccel = PredictedAccel;
        carBrake = PredictedBrake;

        // Search along the spline for the closest point to the current car position
        float bestT = 0.0f, minDistance = 100000.0f;

        Vector3 carPosition = carController.gameObject.transform.localPosition;

        // When not training, use the track spline
        BezierSpline spline = trackSpline;

        if (Training)
            spline = splineList[SplineIndex];

        float totalDistance = 0.0f;

        for (float t = 0.0f; t <= 1.0f; t += 0.001f)
        {
            Vector3 position = spline.GetPoint(t);
            Vector3 positionPrev = spline.GetPoint(t - 0.001f);

            float distance = Vector3.Distance(position, carPosition);

            totalDistance += Vector3.Distance(position, positionPrev);

            if (distance <= minDistance)
            {
                minDistance = distance;
                bestT = t;
            }
        }

        // Reset car position and direction?
        if (Input.GetKeyUp(KeyCode.R) || carController.Collided)
        {
            if (ForcePredictionMode == false)
                Training = true;

            carController.ResetCollided();

            // Spline 0 is usually set as the spline used to create the track
            SplineIndex = 0;

            Vector3 position = spline.GetPoint(bestT);
            carController.gameObject.transform.localPosition = position;

            Vector3 splineDirection = spline.GetDirection(bestT).normalized;
            carController.gameObject.transform.forward = -splineDirection;
        }

        // Determine the difference between the input image (t) and predicted image (t+1)
        CalculateNormalizedCrossCorrelation();

        // Toggle training on if too divergent?
        if (Training == false && ForcePredictionMode == false && NCC < 0.25f)
            Training = true;

        // Toggle training off if quite confident?
        if (Training == true && NCC > 0.85f && LapCount >= initialTrainingLaps)
            Training = false;

        if (carController.CurrentSpeed < 2.0f)
            Training = true;

        if (Training)
            _trainingCount++;
        else
            _predictingCount++;

        if (Training && spline != null)
        {
            // Steer toward a point further along the spline while training
            Vector3 carDirection = -carController.gameObject.transform.forward.normalized;

            Vector3 targetPosition = spline.GetPoint(bestT + SteerAhead / totalDistance);
            //Vector3 splineDirection = spline.GetDirection(bestT).normalized;
            Vector3 targetDirection = (targetPosition - carPosition).normalized;

            float angle = (1.0f - Vector3.Dot(carDirection, targetDirection)); // * Mathf.Rad2Deg;

            Vector3 right = Vector3.Cross(carDirection, Vector3.up);
            float angle2 = Vector3.Dot(right, targetDirection);

            float newCarSteer = Mathf.Exp(256.0f * angle) - 1.0f;

            if (Mathf.Abs(minDistance) > 0.01f) //newCarSteer > Mathf.PI / 64.0f)
                newCarSteer += angle2 * Mathf.Abs(minDistance);

            if (angle2 > 0.0f)
                newCarSteer = -newCarSteer;

            if (newCarSteer > 1.0f)
                newCarSteer = 1.0f;
            else if (newCarSteer < -1.0f)
                newCarSteer = -1.0f;

            float steerBlend = 0.75f;
            carSteer = (steerBlend * newCarSteer) + ((1.0f - steerBlend) * carSteer);

            if (enableDebugLines)
            {
                debugLinePositions[0] = carController.gameObject.transform.localPosition;
                debugLinePositions[1] = debugLinePositions[0] + carDirection * 10.0f;
                debugLinePositions[2] = carController.gameObject.transform.localPosition;
                debugLinePositions[3] = debugLinePositions[2] + targetDirection * 10.0f;
                debugLine.SetPositions(debugLinePositions);
            }
        }

        float totalCount = _trainingCount + _predictingCount;

        if (totalCount == 0.0f)
        {
            TrainingPercent = 1.0f;
            PredictionPercent = 0.0f;
        }
        else
        {
            TrainingPercent = (float)_trainingCount / totalCount;
            PredictionPercent = (float)_predictingCount / totalCount;
        }

        if (bestT < prevBestT)
        {
            LapCount++;

            _trainingCount = 0;
            _predictingCount = 0;

            if ((LapCount % lapsPerSpline) == 0)
            {
                SplineIndex++;

                if (SplineIndex >= splineList.Length)
                    SplineIndex = 0;
            }
        }

        prevBestT = bestT;
    }

    if (userControl)
    {
        // Control overrides: pass the user input to the car
        float h = CrossPlatformInputManager.GetAxis("Horizontal");
        float v = CrossPlatformInputManager.GetAxis("Vertical");
#if !MOBILE_INPUT
        float handbrake = CrossPlatformInputManager.GetAxis("Jump");
#endif
        carSteer = h;
        carAccel = v;
        carBrake = v;
        HandBrake = handbrake;
    }

    // Toggle training?
    if (Input.GetKeyUp(KeyCode.T))
    {
        Training = !Training;
        ForcePredictionMode = false;
    }
    else
    // Force prediction mode?
    if (Input.GetKeyUp(KeyCode.F))
    {
        Training = false;
        ForcePredictionMode = true;
    }

    // Save out the current state of the hierarchy?
    if (Input.GetKeyUp(KeyCode.O) && hierarchyFileName.Length > 0)
    {
        _hierarchy.save(_res.getComputeSystem(), hierarchyFileName);
        print("Saved OgmaNeo hierarchy to " + hierarchyFileName);
    }
}
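// The following variant of Update() targets the EOgmaNeo hierarchy. Instead of feeding the
// whole Y'UV image into the hierarchy as a ValueField2D, it pre-filters the camera image
// (optionally with Gaussian blur, Gabor filtering, adaptive thresholding, Canny, or Sobel)
// and uses the OpenCV line segment or FAST feature detectors to build sparse SDR inputs
// for the hierarchy step.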
void Update()
{
    if (applicationExiting)
        return;

    if (cameraTexture == null || predictionTexture == null || carController == null)
        return;

    // Remember currently active render texture
    RenderTexture currentActiveRT = RenderTexture.active;

    // Transfer the camera capture into the prediction texture (temporarily)
    RenderTexture.active = cameraTexture;
    predictionTexture.ReadPixels(new Rect(0, 0, _inputWidth, _inputHeight), 0, 0);
    predictionTexture.Apply();

    // Restore active render texture
    RenderTexture.active = currentActiveRT;

    // Edge detection convolution methods:
    // - Canny - https://en.wikipedia.org/wiki/Canny_edge_detector
    // - Laplacian of the Gaussian (LoG) - https://en.wikipedia.org/wiki/Blob_detection#The_Laplacian_of_Gaussian
    // - Sobel-Feldman and Scharr operators - https://en.wikipedia.org/wiki/Sobel_operator
    // - Prewitt operator - https://en.wikipedia.org/wiki/Prewitt_operator
    // - Kirsch operator - https://en.wikipedia.org/wiki/Kirsch_operator
    bool useSobel = false;
    bool useCanny = false && !useSobel;
    bool useBlur = false && !useCanny; // Canny already includes Gaussian blurring
    bool useThresholding = false;
    bool useGaborFilter = false;
    bool useLineSegmentDetector = true;
    bool useFastFeatureDetector = !useLineSegmentDetector;

    // Blur entire camera image?
    if (useBlur)
    {
        Texture2D blurredTexture = ConvolutionFilter.Apply(predictionTexture, ConvolutionFilter.GaussianBlur);
        predictionTexture.SetPixels(blurredTexture.GetPixels());
    }

    // Convert from RGB space to Y'UV (ignoring chrominance)
    Color actualPixel = new Color();
    Color yuvPixel = new Color();

    for (int x = 0; x < _inputWidth; x++)
    {
        for (int y = 0; y < _inputHeight; y++)
        {
            actualPixel = predictionTexture.GetPixel(x, y);

            // SDTV (BT.601) Y'UV conversion
            yuvPixel.r = actualPixel.r * 0.299f + actualPixel.g * 0.587f + actualPixel.b * 0.114f; // Y' luma component

            // Chrominance
            // U = r * -0.14713 + g * -0.28886 + b * 0.436
            yuvPixel.g = 0.0f;
            // V = r * 0.615 + g * -0.51499 + b * -0.10001
            yuvPixel.b = 0.0f;

            predictionTexture.SetPixel(x, y, yuvPixel);
        }
    }

    int pixelPos;

    // Extract a portion of the camera image (half height)
    int yOffset = 16; // Set to 0 for bottom half, _hiddenHeight for top half
    int yHeight = _hiddenHeight;

    for (int y = yOffset; y < yOffset + yHeight; y++)
    {
        for (int x = 0; x < _hiddenWidth; x++)
        {
            pixelPos = ((y - yOffset) * _hiddenWidth) + x;
            _inputField[pixelPos] = predictionTexture.GetPixel(x, y).r;
        }
    }

    if (useGaborFilter)
    {
        _openCV.GaborFilter(_inputField, 5, 4.0f, 0.0f, 10.0f, 0.5f, 0.0f);

        Color tempPixel = new Color(0.0f, 0.0f, 0.0f);

        for (int y = 0; y < yHeight; y++)
        {
            for (int x = 0; x < _hiddenWidth; x++)
            {
                pixelPos = (y * _hiddenWidth) + x;

                tempPixel.r = _inputField[pixelPos];
                tempPixel.g = tempPixel.r;
                tempPixel.b = tempPixel.r;

                predictionTexture.SetPixel(x, y + yHeight, tempPixel);
            }
        }

        predictionTexture.Apply();
    }

    if (useThresholding)
    {
        //_openCV.Threshold(_inputField, 0.0f, 255.0f,
        //    eogmaneo.OpenCVInterop.CV_THRESH_TOZERO | eogmaneo.OpenCVInterop.CV_THRESH_OTSU);
        _openCV.AdaptiveThreshold(_inputField, 255.0f,
            eogmaneo.OpenCVInterop.CV_ADAPTIVE_THRESH_GAUSSIAN_C,
            eogmaneo.OpenCVInterop.CV_THRESH_BINARY, 5, 2);

        Color tempPixel = new Color(0.0f, 0.0f, 0.0f);

        for (int y = 0; y < yHeight; y++)
        {
            for (int x = 0; x < _hiddenWidth; x++)
            {
                pixelPos = (y * _hiddenWidth) + x;

                tempPixel.r = _inputField[pixelPos];
                tempPixel.g = tempPixel.r;
                tempPixel.b = tempPixel.r;

                predictionTexture.SetPixel(x, y + yHeight, tempPixel);
            }
        }

        predictionTexture.Apply();
    }

    if (useCanny)
    {
        _openCV.CannyEdgeDetection(_inputField, 50.0f, 50.0f * 3.0f);

        Color tempPixel = new Color(0.0f, 0.0f, 0.0f);

        for (int y = 0; y < yHeight; y++)
        {
            for (int x = 0; x < _hiddenWidth; x++)
            {
                pixelPos = (y * _hiddenWidth) + x;

                tempPixel.r = _inputField[pixelPos];
                tempPixel.g = tempPixel.r;
                tempPixel.b = tempPixel.r;

                predictionTexture.SetPixel(x, y + yHeight, tempPixel);
            }
        }

        predictionTexture.Apply();
    }

    if (useSobel)
    {
        // Make sure that Sobel input and output use a signed pixel data type,
        // e.g. convert afterwards to 8-bit unsigned:
        //   sobelx64f = cv2.Sobel(img, cv2.CV_64F, 1, 0, ksize = 5)
        //   abs_sobel64f = np.absolute(sobelx64f)
        //   sobel_8u = np.uint8(abs_sobel64f)
        Texture2D horzTexture = ConvolutionFilter.Apply(predictionTexture, ConvolutionFilter.Sobel3x3Horizontal);
        Texture2D vertTexture = ConvolutionFilter.Apply(predictionTexture, ConvolutionFilter.Sobel3x3Vertical);
        Texture2D convolvedTexture = new Texture2D(_inputWidth, _inputHeight, predictionTexture.format, false);

        Color tempPixel = new Color(0.0f, 0.0f, 0.0f);

        for (int y = yOffset; y < yOffset + yHeight; y++)
        {
            for (int x = 0; x < _hiddenWidth; x++)
            {
                Color horzPixel = horzTexture.GetPixel(x, y);
                Color vertPixel = vertTexture.GetPixel(x, y);

                tempPixel.r = Mathf.Sqrt((horzPixel.r * horzPixel.r) + (vertPixel.r * vertPixel.r));
                tempPixel.g = tempPixel.r; // Mathf.Sqrt((horzPixel.g * horzPixel.g) + (vertPixel.g * vertPixel.g));
                tempPixel.b = tempPixel.r; // Mathf.Sqrt((horzPixel.b * horzPixel.b) + (vertPixel.b * vertPixel.b));

                convolvedTexture.SetPixel(x, (y - yOffset) + _hiddenHeight, tempPixel);

                pixelPos = ((y - yOffset) * _hiddenWidth) + x;
                _inputField[pixelPos] = (int)(tempPixel.r * 255.0f);
            }
        }

        predictionTexture.SetPixels(convolvedTexture.GetPixels());
        predictionTexture.Apply();
    }

    if (useLineSegmentDetector)
    {
        // Pass the filtered image into the Line Segment Detector (optionally drawing found lines),
        // and construct the rotation SDR for passing into the hierarchy
        bool drawLines = true;

        _openCV.LineSegmentDetector(_inputField, _hiddenWidth, _hiddenHeight, 6, _rotationSDR, drawLines);

        if (drawLines)
        {
            // With drawLines enabled, _inputField gets overwritten with black background
            // pixels and the detected lines drawn on top in white.
            // Transfer back into the predictionTexture for display (top half; the bottom will show SDRs)
            Color tempPixel = new Color(0.0f, 0.0f, 0.0f);

            for (int y = yOffset; y < yOffset + yHeight; y++)
            {
                for (int x = 0; x < _hiddenWidth; x++)
                {
                    pixelPos = ((y - yOffset) * _hiddenWidth) + x;

                    tempPixel.r = _inputField[pixelPos];
                    tempPixel.g = tempPixel.r;
                    tempPixel.b = tempPixel.r;

                    predictionTexture.SetPixel(x, (y - yOffset) + _hiddenHeight, tempPixel);
                }
            }

            predictionTexture.Apply();
        }
    }

    if (useFastFeatureDetector)
    {
        // Pass the filtered image into the FAST Feature Detector (optionally drawing points found),
        // and construct the feature SDR for passing into the hierarchy
        bool drawPoints = true;

        _openCV.FastFeatureDetector(_inputField, _hiddenWidth, _hiddenHeight, 6, _rotationSDR, drawPoints, 0, 1, true);

        if (drawPoints)
        {
            // With drawPoints enabled, _inputField gets overwritten with black background
            // pixels and the detected points drawn on top in white.
            // Transfer back into the predictionTexture for display (top half; the bottom will show SDRs)
            Color tempPixel = new Color(0.0f, 0.0f, 0.0f);

            for (int y = yOffset; y < yOffset + yHeight; y++)
            {
                for (int x = 0; x < _hiddenWidth; x++)
                {
                    pixelPos = ((y - yOffset) * _hiddenWidth) + x;

                    tempPixel.r = _inputField[pixelPos];
                    tempPixel.g = tempPixel.r;
                    tempPixel.b = tempPixel.r;

                    predictionTexture.SetPixel(x, (y - yOffset) + _hiddenHeight, tempPixel);
                }
            }

            predictionTexture.Apply();
        }
    }

    Color predictedPixel = new Color();

    // Plot pre-encoder SDR output just underneath the input filtered image
    int onState = 0;

    for (int y = 16; y < 32; y++)
    {
        for (int x = 0; x < _inputWidth; x++)
        {
            if (x < _rotationSDR.Count)
            {
                predictedPixel.r = _rotationSDR[x];

                if (y == 16)
                    onState += (int)predictedPixel.r;
            }
            else
            {
                predictedPixel.r = 0.0f;
            }

            predictedPixel.g = predictedPixel.r;
            predictedPixel.b = predictedPixel.r;

            predictionTexture.SetPixel(x, y, predictedPixel);
        }
    }

    // Plot predicted SDR output at the bottom
    int ccState = 0;

    for (int y = 0; y < 16; y++)
    {
        for (int x = 0; x < _inputWidth; x++)
        {
            if (x < _rotationSDR.Count)
            {
                predictedPixel.r = _predictedSDR[x];

                if (y == 0)
                    ccState += _rotationSDR[x] & _predictedSDR[x];
            }
            else
            {
                predictedPixel.r = 0.0f;
            }

            predictedPixel.g = predictedPixel.r;
            predictedPixel.b = predictedPixel.r;

            predictionTexture.SetPixel(x, y, predictedPixel);
        }
    }

    predictionTexture.Apply();

    _onStates.Add(onState);
    _ccStates.Add(ccState);

    // Trim lists?
    if (_onStates.Count > _maxNumStates)
    {
        _onStates.RemoveAt(0);
        _ccStates.RemoveAt(0);
    }

    // Compare actual and predicted SDRs over the recent history
    NCC = 0.0f;

    for (int i = 0; i < _onStates.Count; i++)
    {
        if (_ccStates[i] == 0 && _onStates[i] == 0)
            NCC += 1.0f;
        else if (_onStates[i] == 0)
            NCC += 1.0f;
        else
            NCC += (float)_ccStates[i] / (float)_onStates[i];
    }

    NCC /= (float)_onStates.Count;

    // Encode scalar values from the car controller
    Steer = carController.CurrentSteerAngle / carController.m_MaximumSteerAngle;
    Accel = carController.AccelInput;
    Brake = carController.BrakeInput;

    //for (int i = 0; i < 6 * 6; i++)
    //    _inputValues[i] = 0;
    //int index = (int)((Steer * 0.5f + 0.5f) * (6 * 6 - 1) + 0.5f);
    //_inputValues[index] = 1;
    _inputValues[0] = (int)((Steer * 0.5f + 0.5f) * (6.0f * 6.0f - 1.0f) + 0.5f);

    // Setup the hierarchy input vector
    Std2DVeci input = new Std2DVeci();
    input.Add(_rotationSDR);
    input.Add(_inputValues);

    // Step the hierarchy
    _hierarchy.step(input, _system, Training);

    StdVeci predictions = _hierarchy.getPrediction(0);

    for (int i = 0; i < _predictedSDR.Count; i++)
        _predictedSDR[i] = predictions[i];

    // Wait for physics to settle
    if (_time < 1.0f)
    {
        _time += Time.deltaTime;

        // Apply hand brake
        carSteer = 0.0f;
        carAccel = 0.0f;
        carBrake = -1.0f;
        HandBrake = 1.0f;
    }
    else
    {
        // Release hand brake
        HandBrake = 0.0f;

        Accel = -1.0f;
        Brake = Accel;

        // Update the car controller
        StdVeci steeringPredictions = _hierarchy.getPrediction(1);

        //int maxIndex = 0;
        //for (int i = 1; i < 6 * 6; i++)
        //    if (steeringPredictions[i] > steeringPredictions[maxIndex])
        //        maxIndex = i;
        //PredictedSteer = (float)(maxIndex) / (float)(6 * 6 - 1) * 2.0f - 1.0f;
        PredictedSteer = (steeringPredictions[0] / (6.0f * 6.0f - 1.0f)) * 2.0f - 1.0f;
        PredictedAccel = Accel;
        PredictedBrake = Brake;

        carSteer = PredictedSteer;
        carAccel = PredictedAccel;
        carBrake = PredictedBrake;

        // Search along the spline for the closest point to the current car position
        float bestT = 0.0f, minDistance = 100000.0f;

        Vector3 carPosition = carController.gameObject.transform.localPosition;

        // When not training, use the track spline
        BezierSpline spline = trackSpline;

        if (Training)
            spline = splineList[SplineIndex];

        float totalDistance = 0.0f;

        for (float t = 0.0f; t <= 1.0f; t += 0.001f)
        {
            Vector3 position = spline.GetPoint(t);
            Vector3 positionPrev = spline.GetPoint(t - 0.001f);

            float distance = Vector3.Distance(position, carPosition);

            totalDistance += Vector3.Distance(position, positionPrev);

            if (distance <= minDistance)
            {
                minDistance = distance;
                bestT = t;
            }
        }

        // Assume +-2 units is the maximum distance the car is allowed to be from the center spline
        NCC = Mathf.Max(0.0f, NCC - (1.0f - ((2.0f - Vector3.Distance(carPosition, spline.GetPoint(bestT))) / 2.0f)));
        //NCC = ((2.0f - Vector3.Distance(carPosition, spline.GetPoint(bestT))) / 2.0f);

        // Reset car position and direction?
        if (Input.GetKeyUp(KeyCode.R) || carController.Collided)
        {
            if (ForcePredictionMode == false)
                Training = true;

            carController.ResetCollided();

            // Spline 0 is usually set as the spline used to create the track
            SplineIndex = 0;

            Vector3 position = spline.GetPoint(bestT);
            position.y = carController.gameObject.transform.localPosition.y;
            carController.gameObject.transform.localPosition = position;

            Vector3 splineDirection = spline.GetDirection(bestT).normalized;
            carController.gameObject.transform.forward = -splineDirection;
        }

        // Toggle training on if too divergent?
        if (Training == false && ForcePredictionMode == false && NCC < 0.25f)
            Training = true;

        // Toggle training off if quite confident?
        if (Training == true && NCC > 0.85f && LapCount >= initialTrainingLaps)
            Training = false;

        if (carController.CurrentSpeed < 2.0f)
            Training = true;

        if (Training)
            _trainingCount++;
        else
            _predictingCount++;

        if (Training && spline != null)
        {
            // Steer toward a point further along the spline while training
            Vector3 carDirection = -carController.gameObject.transform.forward.normalized;

            Vector3 targetPosition = spline.GetPoint(bestT + (SteerAhead / totalDistance));
            //Vector3 splineDirection = spline.GetDirection(bestT).normalized;
            Vector3 targetDirection = (targetPosition - carPosition).normalized;

            float angle = (1.0f - Vector3.Dot(carDirection, targetDirection)); // * Mathf.Rad2Deg;

            Vector3 right = Vector3.Cross(carDirection, Vector3.up);
            float angle2 = Vector3.Dot(right, targetDirection);

            float newCarSteer = Mathf.Exp(256.0f * angle) - 1.0f;

            if (Mathf.Abs(minDistance) > 0.01f) //newCarSteer > Mathf.PI / 64.0f)
                newCarSteer += angle2 * Mathf.Abs(minDistance);

            if (angle2 > 0.0f)
                newCarSteer = -newCarSteer;

            if (newCarSteer > 1.0f)
                newCarSteer = 1.0f;
            else if (newCarSteer < -1.0f)
                newCarSteer = -1.0f;

            float steerBlend = 0.5f;
            carSteer = (steerBlend * newCarSteer) + ((1.0f - steerBlend) * carSteer);

            if (enableDebugLines)
            {
                debugLinePositions[0] = carController.gameObject.transform.localPosition;
                debugLinePositions[1] = debugLinePositions[0] + carDirection * 10.0f;
                debugLinePositions[2] = carController.gameObject.transform.localPosition;
                debugLinePositions[3] = debugLinePositions[2] + targetDirection * 10.0f;
                debugLine.SetPositions(debugLinePositions);
            }
        }

        float totalCount = _trainingCount + _predictingCount;

        if (totalCount == 0.0f)
        {
            TrainingPercent = 1.0f;
            PredictionPercent = 0.0f;
        }
        else
        {
            TrainingPercent = (float)_trainingCount / totalCount;
            PredictionPercent = (float)_predictingCount / totalCount;
        }

        if (bestT < prevBestT)
        {
            LapCount++;

            _trainingCount = 0;
            _predictingCount = 0;

            if ((LapCount % lapsPerSpline) == 0)
            {
                SplineIndex++;

                if (SplineIndex >= splineList.Length)
                    SplineIndex = 0;
            }
        }

        prevBestT = bestT;
    }

    if (connectToNeoVis && _neoVis != null)
        _neoVis.update(0.01f);

    if (userControl)
    {
        // Control overrides: pass the user input to the car
        float h = CrossPlatformInputManager.GetAxis("Horizontal");
        float v = CrossPlatformInputManager.GetAxis("Vertical");
#if !MOBILE_INPUT
        float handbrake = CrossPlatformInputManager.GetAxis("Jump");
#endif
        carSteer = h;
        carAccel = v;
        carBrake = v;
        HandBrake = handbrake;
    }

    // Toggle training mode?
    if (Input.GetKeyUp(KeyCode.T))
    {
        Training = !Training;
        ForcePredictionMode = false;
    }
    else
    // Force prediction mode?
    if (Input.GetKeyUp(KeyCode.F))
    {
        Training = false;
        ForcePredictionMode = true;
    }

    // Save out the current state of the hierarchy?
    if (Input.GetKeyUp(KeyCode.O) && hierarchyFileName.Length > 0)
    {
        _hierarchy.save(hierarchyFileName);
        print("Saved OgmaNeo hierarchy to " + hierarchyFileName);
    }
}