private IEnumerator RunDetection(BaseVideoPlayer videoPlayer) {
  while (!videoPlayer.IsVideoLoaded) {
    yield return null;
  }

  // It isn't guaranteed that metadata is available before these conditions are met
  // in the Gvr Video Plugin.
  while (videoPlayer.CurrentPositionMilliseconds == 0 || !videoPlayer.IsPlaying) {
    yield return null;
  }

  if (!videoPlayer.HasSphericalMetadata) {
    CompleteDetection(null);
  } else {
    StereoProjectionFormat format = new StereoProjectionFormat();
    format.stereoMode = videoPlayer.MetadataStereoMode;

    // TODO: Proper parsing of projection box in GvrVideoPlugin.
    if (videoPlayer.HasProjectionMetadata) {
      format.projectionMode = BaseMediaPlayer.ProjectionMode.Projection360;
    }

    CompleteDetection(format);
  }
}
private void OnFormatDetected(StereoProjectionFormat format) {
  // If there was no format detected, then use the default format.
  if (format == null) {
    format = new StereoProjectionFormat();
  }

  CurrentAspectRatio = format.frameAspectRatio;
  CurrentStereoMode = format.stereoMode;

  bool changedProjection = false;
  if (CurrentProjectionMode != format.projectionMode) {
    CurrentProjectionMode = format.projectionMode;
    changedProjection = true;
  }

  // The screen is automatically re-initialized when the projection is detected,
  // so make sure we don't init it twice.
  if (!changedProjection) {
    InitScreen();
  }

  SaveCurrentFormat();
}
// Clears detection state and drops the cached references to the saved-format file and directory.
public override void ResetDetection() {
  base.ResetDetection();
  formatFileInfo = null;
  formatDirectoryInfo = null;
  format = null;
}
/// All subclasses of BaseStereoProjectionDetector MUST call this function
/// when detection is finished. If unable to detect a format, pass null
/// into this function.
protected virtual void CompleteDetection(StereoProjectionFormat format) {
  if (resultCallback != null) {
    resultCallback(format);
  }

  ResetDetection();
}
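// Sketch of the expected call pattern described above. This coroutine is hypothetical and not
// part of the original source: a detector runs its asynchronous work, then reports exactly once
// via CompleteDetection, passing null when nothing could be detected.
private IEnumerator RunFallbackDetection() {
  // Hypothetical: wait a frame to stand in for asynchronous detection work.
  yield return null;

  // No format could be determined; report null so the caller falls back to the default format.
  CompleteDetection(null);
}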
private void OnFormatDetected(StereoProjectionFormat format) {
  // Ignore stale results from a detector that no longer belongs to the active queue runner.
  if (queue.queueRunner != this) {
    return;
  }

  // A null format means this detector failed; move on to the next detector in the queue.
  if (format == null) {
    currentIndex++;
    TryNextDetector();
    return;
  }

#if DEBUG_PROJECTION_DETECTOR_QUEUE
  Debug.Log("Detected Stereo Projection Format!");
#endif

  queue.CompleteDetection(format);
}
public void SaveCurrentFormat() {
  StereoProjectionFormat format = savedStereoProjectionDetector.SavedFormat;

  // Nothing to do if the saved format already matches the current one.
  if (CompareFormat(format)) {
    return;
  }

  if (format == null) {
    format = new StereoProjectionFormat();
  }

  Debug.Log("Saving Stereo Projection Format.");

  format.projectionMode = CurrentProjectionMode;
  format.stereoMode = CurrentStereoMode;
  format.frameAspectRatio = CurrentAspectRatio;
  savedStereoProjectionDetector.SavedFormat = format;
}
private IEnumerator AnalyzeImageCoroutine(Texture2D texture, Action<StereoProjectionFormat> callback) {
  // Wait a frame.
  yield return null;

  // Blit the texture to a tiny render texture.
  Blit(texture);

  // Wait a few frames before reading the pixels to avoid pipeline stalls
  // and reduce the amount of work done on a single frame.
  for (int i = 0; i < FRAMES_AFTER_BLIT; i++) {
    yield return new WaitForEndOfFrame();
  }

  // Calculate the format based on the blitted image.
  StereoProjectionFormat format = CalculateFormat(texture);

  // Invoke callback.
  callback(format);

  PostAnalyzeImage();
}
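// A minimal sketch of what the Blit step above might look like, assuming it downsamples the
// source image into the small renderTexture that CalculateFormat reads back. The target size
// constant and the temporary RenderTexture handling are assumptions, not the original code;
// it also assumes DisposeRenderTexture releases the temporary texture afterwards.
private void Blit(Texture2D texture) {
  const int BLIT_SIZE = 64;  // Hypothetical target size; a small texture keeps ReadPixels cheap.
  renderTexture = RenderTexture.GetTemporary(BLIT_SIZE, BLIT_SIZE, 0);
  Graphics.Blit(texture, renderTexture);
}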
// Returns true if the given format exactly matches the currently displayed format.
private bool CompareFormat(StereoProjectionFormat format) {
  if (format == null) {
    return false;
  }

  if (format.projectionMode != CurrentProjectionMode) {
    return false;
  }

  if (format.stereoMode != CurrentStereoMode) {
    return false;
  }

  if (format.frameAspectRatio != CurrentAspectRatio) {
    return false;
  }

  return true;
}
private StereoProjectionFormat CalculateFormat(Texture2D originalTexture) {
  if (renderTexture == null) {
    return null;
  }

  int width = renderTexture.width;
  int height = renderTexture.height;

  // Read the blitted texture.
  RenderTexture.active = renderTexture;
  Texture2D targetTexture = new Texture2D(width, height);
  targetTexture.ReadPixels(new Rect(0, 0, width, height), 0, 0);
  DisposeRenderTexture();

  // Get the pixels from the texture.
  Color32[] pixels = targetTexture.GetPixels32();

  // Calculate the lightest and darkest color values in the image.
  // This is used to normalize the differences between pixels.
  Vector3 darkest;
  Vector3 lightest;
  CalculateLightestAndDarkestColors(pixels, out lightest, out darkest);

  int halfHeight = height / 2;
  int halfWidth = width / 2;

#if DEBUG_IMAGE_DETECTOR
  Texture2D fullTex = new Texture2D(width, height, TextureFormat.RGB24, false);
  Texture2D leftRightTex = new Texture2D(halfWidth, height, TextureFormat.RGB24, false);
  Texture2D topBottomTex = new Texture2D(width, halfHeight, TextureFormat.RGB24, false);
#endif

  Vector3 leftRightSum = Vector3.zero;
  Vector3 topBottomSum = Vector3.zero;

  for (int y = 0; y < height; y++) {
    for (int x = 0; x < width; x++) {
      int pixelIndex = LocationToIndex(x, y, width, height);
      Color pixel = pixels[pixelIndex];
      Vector3 normalizedPixel = NormalizeColorToRange(pixel, lightest, darkest);

      // Compare each pixel in the left half against the corresponding pixel in the right half.
      if (x < halfWidth) {
        int rightPixelIndex = LocationToIndex(x + halfWidth, y, width, height);
        Color rightPixel = pixels[rightPixelIndex];
        Vector3 normalizedRightPixel = NormalizeColorToRange(rightPixel, lightest, darkest);
        Vector3 diffPixel = normalizedPixel - normalizedRightPixel;
        leftRightSum += SquaredVector(diffPixel);
#if DEBUG_IMAGE_DETECTOR
        leftRightTex.SetPixel(x, y, pixel - rightPixel);
#endif
      }

      // Compare each pixel in the top half against the corresponding pixel in the bottom half.
      if (y < halfHeight) {
        int bottomPixelIndex = LocationToIndex(x, y + halfHeight, width, height);
        Color bottomPixel = pixels[bottomPixelIndex];
        Vector3 normalizedBottomPixel = NormalizeColorToRange(bottomPixel, lightest, darkest);
        Vector3 diffPixel = normalizedPixel - normalizedBottomPixel;
        topBottomSum += SquaredVector(diffPixel);
#if DEBUG_IMAGE_DETECTOR
        topBottomTex.SetPixel(x, y, pixel - bottomPixel);
#endif
      }

#if DEBUG_IMAGE_DETECTOR
      fullTex.SetPixel(x, y, pixel);
#endif
    }
  }

#if DEBUG_IMAGE_DETECTOR
  string mediaPath = MediaPlayer.FilePath;
  mediaPath = Path.GetFileNameWithoutExtension(mediaPath);

  fullTex.Apply();
  byte[] bytes = fullTex.EncodeToPNG();
  string fullPath = Application.persistentDataPath + "/" + mediaPath + "Full.png";
  File.WriteAllBytes(fullPath, bytes);

  leftRightTex.Apply();
  bytes = leftRightTex.EncodeToPNG();
  string leftRightPath = Application.persistentDataPath + "/" + mediaPath + "LeftRightDiff.png";
  File.WriteAllBytes(leftRightPath, bytes);

  topBottomTex.Apply();
  bytes = topBottomTex.EncodeToPNG();
  string topBottomPath = Application.persistentDataPath + "/" + mediaPath + "TopBottomDiff.png";
  File.WriteAllBytes(topBottomPath, bytes);

  Debug.Log("Wrote Image Diffs."
    + "\nFull=" + fullPath
    + "\nLeftRight=" + leftRightPath
    + "\nTopBottom=" + topBottomPath);
#endif

  // Left Right: average squared difference between the two horizontal halves.
  int numPixelsLeftRight = halfWidth * height;
  Vector3 leftRightRatio = leftRightSum / numPixelsLeftRight;
  float leftRightSimilarityRatio = AverageValueOfVector(leftRightRatio);

  // Top Bottom: average squared difference between the two vertical halves.
  int numPixelsTopBottom = width * halfHeight;
  Vector3 topBottomRatio = topBottomSum / numPixelsTopBottom;
  float topBottomSimilarityRatio = AverageValueOfVector(topBottomRatio);

#if DEBUG_IMAGE_DETECTOR
  Debug.Log("Left/Right similarity ratio = " + leftRightSimilarityRatio);
  Debug.Log("Top/Bottom similarity ratio = " + topBottomSimilarityRatio);
#endif

  StereoProjectionFormat format = new StereoProjectionFormat();

  // The pair of halves with the smaller difference is the most similar; if that difference is
  // below the threshold, treat the image as a stereo pair in that layout.
  if (leftRightSimilarityRatio < SIMILARITY_THRESHOLD
      && leftRightSimilarityRatio < topBottomSimilarityRatio) {
    format.stereoMode = BaseMediaPlayer.StereoMode.LeftRight;
  } else if (topBottomSimilarityRatio < SIMILARITY_THRESHOLD) {
    format.stereoMode = BaseMediaPlayer.StereoMode.TopBottom;
  }

  format.frameAspectRatio =
    ImageBasedProjectionDetectorHelpers.CalculateFrameAspectRatio(originalTexture, format.stereoMode);

  // A sufficiently wide frame aspect ratio is treated as a 360 projection.
  if (format.frameAspectRatio >= SPHERICAL_THRESHOLD) {
    format.projectionMode = BaseMediaPlayer.ProjectionMode.Projection360;
  }

#if DEBUG_IMAGE_DETECTOR
  Debug.Log("Frame Aspect Ratio = " + format.frameAspectRatio);
#endif

  return format;
}
// Called when image analysis finishes; forwards the result to complete detection.
private void OnAnalyzeImage(StereoProjectionFormat format) {
  CompleteDetection(format);
}
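// Illustrative only (hypothetical method, not from the original source): how the image analysis
// above might be kicked off once a frame of the media is available as a Texture2D.
private void BeginImageAnalysis(Texture2D capturedFrame) {
  StartCoroutine(AnalyzeImageCoroutine(capturedFrame, OnAnalyzeImage));
}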