private void SetRecordingTexture()
{
    if (Screen.orientation == ScreenOrientation.Landscape ||
        Screen.orientation == ScreenOrientation.LandscapeLeft ||
        Screen.orientation == ScreenOrientation.LandscapeRight)
    {
#if UNITY_IOS || UNITY_EDITOR
        if (MergeCube.MergeCubeSDK.instance.viewMode == MergeCubeBase.ViewMode.HEADSET)
        {
            // Headset mode renders to the SDK's own side-by-side texture.
            recordingTexture = MergeCube.MergeCubeSDK.instance.GetHeadsetTexture();
        }
        else
        {
            // Fullscreen landscape: record at a fixed 1334x750 (iPhone-native) size.
            recordingTexture = new RenderTexture(1334, 750, 24);
        }
        recordingTexture.Create();
// #elif UNITY_ANDROID
//         if (MergeCube.MergeCubeSDK.instance.viewMode == MergeCubeBase.ViewMode.HEADSET)
//         {
//             recordingTexture = MergeCube.MergeCubeSDK.instance.GetHeadsetTexture();
//         }
#endif
    }
    else
    {
#if UNITY_IOS || UNITY_EDITOR
        // Portrait: same resolution with the axes swapped.
        recordingTexture = new RenderTexture(750, 1334, 24);
        recordingTexture.Create();
// #elif UNITY_ANDROID
//         recordingTexture = new RenderTexture(432, 768, 24);
#endif
    }

#if UNITY_IOS || UNITY_EDITOR
    Camera.main.targetTexture = recordingTexture;
// #elif UNITY_ANDROID
//     if (MergeCube.MergeCubeSDK.instance.viewMode == MergeCubeBase.ViewMode.HEADSET)
//     {
//         Camera.main.targetTexture = recordingTexture;
//     }
#endif

#if UNITY_IOS
    MergeCubeSDK.instance.videoTexture.SetTexture("_Texture", recordingTexture);
#endif

#if UNITY_IOS || UNITY_EDITOR
    if (MergeCubeSDK.instance.viewMode == MergeCube.MergeCubeBase.ViewMode.FULLSCREEN)
    {
        fullscreenRecordingCamera.SetActive(true);
    }
#endif

#if UNITY_IOS
    // Debug.Log("Should have set a recording texture to: " + vidCapManager);
    vidCapManager.SetCustomRenderTexture(Camera.main.targetTexture);
#endif
}
private void SetRecordingTexture()
{
    // Blitter mirrors the main camera's output into a RenderTexture,
    // so the recorder can read frames without permanently redirecting
    // Camera.main.targetTexture.
    Camera.main.GetComponent<Blitter>().enabled = true;
#if UNITY_IOS
    Debug.Log("Should have set a recording texture to: " + vidCapManager);
    vidCapManager.SetCustomRenderTexture(Camera.main.GetComponent<Blitter>().mainCameraTexture);
#endif
// #if UNITY_ANDROID
//     androidRecordingCamera.SetRenderTexture(Camera.main.GetComponent<Blitter>().mainCameraTexture);
// #endif
    isInitialized = true;
}
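// The Blitter component used above is referenced but not shown in this section.
// A minimal sketch of what such a component might look like, assuming it only
// needs to mirror the camera's output into a reusable RenderTexture; this is
// illustrative, not the actual MergeCube implementation.
using UnityEngine;

[RequireComponent(typeof(Camera))]
public class Blitter : MonoBehaviour
{
    // Texture the recorder reads from; lazily created at screen resolution.
    public RenderTexture mainCameraTexture;

    private void OnEnable()
    {
        if (mainCameraTexture == null)
        {
            mainCameraTexture = new RenderTexture(Screen.width, Screen.height, 24);
            mainCameraTexture.Create();
        }
    }

    // Copy each rendered frame into mainCameraTexture, then pass it through
    // unchanged so the on-screen image is unaffected.
    private void OnRenderImage(RenderTexture src, RenderTexture dest)
    {
        Graphics.Blit(src, mainCameraTexture);
        Graphics.Blit(src, dest);
    }
}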
// This function is called when the "Start" button is pressed.
public void StartVideoRecording()
{
    if (isRecording)
    {
        return;
    }

    // Select the cameras and audio listener we're going to record.
    // We can record video from multiple cameras, but there is always just a
    // single audio listener we capture.
    if (mainCamera != null &&
        (videoSourceState == VideoSource.Main_Camera || videoSourceState == VideoSource.Both_Cameras))
    {
        // Record from the main camera and main listener.
        vrCap = mainCamera.GetComponent<iVidCapProVideo>();
        if (mainListener != null)
        {
            arCap = mainListener.GetComponent<iVidCapProAudio>();
        }
        // Enable video capture from this camera.
        vrCap.enabled = true;
    }

    if (secondaryCamera != null &&
        (videoSourceState == VideoSource.Secondary_Camera || videoSourceState == VideoSource.Both_Cameras))
    {
        // Record from the secondary camera. First, turn it on.
        secondaryCamera.enabled = true;
        vrCap = secondaryCamera.GetComponent<iVidCapProVideo>();

        // Use the secondary audio listener if we're recording solely from the secondary camera.
        if (secondaryListener != null && videoSourceState == VideoSource.Secondary_Camera)
        {
            arCap = secondaryListener.GetComponent<iVidCapProAudio>();
            if (mainListener != null)
            {
                // Now turn off the main audio listener. We want to use the secondary listener.
                // We keep the main camera enabled because it is used to show the user's view.
                mainListener.enabled = false;
            }
            secondaryListener.enabled = true;
        }
        // Enable video capture from this camera.
        vrCap.enabled = true;
    }

    isRecording = true;
    vrController.SetDebug(showDebug);

    // Set the desired video dimensions as a fraction of the screen size.
    // (Previously taken from fixed lookup tables.)
    //vidWidth = videoWidths[videoSizeState];
    //vidHeight = videoHeights[videoSizeState];
    // Example: on a 2048x1536 iPad with multiplier 1 and divider 1, this yields
    // 2048x1536, which is then clamped by the iPad hack below.
    vidWidth = (Screen.width * videoSizeMultiplier[(int)videoSizeState]) / videoSizeDivider[(int)videoSizeState];
    vidHeight = (Screen.height * videoSizeMultiplier[(int)videoSizeState]) / videoSizeDivider[(int)videoSizeState];

    // Test configurations (uncomment to experiment):
    //   Full HD:                vidWidth = 1920; vidHeight = 1080;
    //   Tiny:                   vidWidth = 128;  vidHeight = 128;
    //   Unsupported frame size: vidWidth = 2048; vidHeight = 1536;

    // Temporary hack - limit video size to the max allowed size for iPad 3/4,
    // preserving the aspect ratio.
    if (vidWidth == 2048)
    {
        vidWidth = 1440;
        vidHeight = 1080;
    }
    else if (vidHeight == 2048)
    {
        vidHeight = 1440;
        vidWidth = 1080;
    }

    // Do we want to record the video from a custom rendertexture instead of a camera?
    if (customRenderTexture != null && videoSourceState == VideoSource.Custom_RenderTexture)
    {
        // Set the rendertexture and override the UI-specified frame size.
        vrController.SetCustomRenderTexture(customRenderTexture);
        vidWidth = customRenderTexture.width;
        vidHeight = customRenderTexture.height;
        if (mainListener != null)
        {
            arCap = mainListener.GetComponent<iVidCapProAudio>();
        }
    }
    else
    {
        // Be sure to reset the custom rendertexture when we're not using it.
        vrController.SetCustomRenderTexture(null);
    }

    // Enable audio capture.
    if (arCap != null)
    {
        vrController.saveAudio = arCap;
        arCap.enabled = true;
    }

    // Register a delegate in case an error occurs during the recording session.
    vrController.RegisterSessionErrorDelegate(HandleSessionError);

    // Register a delegate to be called when the video is complete.
    vrController.RegisterSessionCompleteDelegate(HandleSessionComplete);

    // Configure video quality settings. This method call is optional; use it only
    // to set the video compression settings to non-default values.
    vrController.ConfigureVideoSettings(bitsPerSecond, keyframeInterval);

    // Configure the gamma setting. This method call is optional; use it only to
    // tweak the gamma of the output video. Because gamma adjustment is
    // computationally expensive, do not invoke it unless you really need it.
    if (gammaCorrection <= 0.9f || gammaCorrection >= 1.1f)
    {
        vrController.ConfigureGammaSetting(gammaCorrection);
    }
    else
    {
        vrController.ConfigureGammaSetting(-1.0f);
    }

    // Has audio recording from the scene been requested from the GUI?
    iVidCapPro.CaptureAudio audioSetting = iVidCapPro.CaptureAudio.No_Audio;
    if (arCap != null && (audioSourceState == AudioSource.Scene || audioSourceState == AudioSource.All))
    {
        audioSetting = iVidCapPro.CaptureAudio.Audio;
        // audioSetting = iVidCapPro.CaptureAudio.Audio_Plus_Mic;
    }

    // Tell the video recorder to begin a recording session and start recording.
    iVidCapPro.SessionStatusCode status = vrController.BeginRecordingSession(
        "SampleVideo",       // file name; only relevant if saving to Documents
        vidWidth, vidHeight, // frame size
        captureFramerate,    // frame rate; NOT USED when captureType is Unlocked
        audioSetting,        // do we want to record audio
        captureType          // type of capture (see docs for details)
    );

    if (status == iVidCapPro.SessionStatusCode.OK)
    {
        // Display a message to tell the user recording is in progress.
        ShowMessage("Recording...", 9999.0f);
    }
    else
    {
        ShowMessage("Recording session failed! Reason: " + status, 15.0f);
    }
}
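// The two session delegates registered above are referenced but not defined in
// this section. A minimal sketch of what they might look like in the same class,
// assuming a no-argument completion callback and an error callback that receives
// an iVidCapPro.SessionStatusCode; check the plugin's docs for the exact
// delegate shapes in the version you use.
public void HandleSessionComplete()
{
    // The session finished and the video file was written; update UI state.
    ShowMessage("Recording complete.", 5.0f);
    isRecording = false;
}

public void HandleSessionError(iVidCapPro.SessionStatusCode errorCode)
{
    // Surface the failure to the user and reset the recording flag.
    ShowMessage("Recording error: " + errorCode, 15.0f);
    isRecording = false;
}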
// Remap: low2 + (value - low1) * (high2 - low2) / (high1 - low1)
//        (see the Remap helper sketch after this method)
// targetAspect.x * (1920 / targetAspect.x)
// recordingTexture = targetAspect.x / (targetAspect.x / 1920)
private void SetRecordingTexture()
{
    if (Screen.orientation == ScreenOrientation.Landscape ||
        Screen.orientation == ScreenOrientation.LandscapeLeft ||
        Screen.orientation == ScreenOrientation.LandscapeRight)
    {
#if UNITY_IOS || UNITY_EDITOR
        if (MergeCube.MergeCubeSDK.instance.viewMode == MergeCubeBase.ViewMode.HEADSET)
        {
            recordingTexture = MergeCube.MergeCubeSDK.instance.GetHeadsetTexture();
            // Debug.Log("E: " + recordingTexture.width + " x " + recordingTexture.height);
        }
        else
        {
            if (targetAspect.x <= 1920)
            {
                // The aspect target already fits the 1920-wide budget; use it as-is.
                // Debug.Log("A: " + (int)targetAspect.x + " x " + (int)targetAspect.y);
                recordingTexture = new RenderTexture((int)targetAspect.x, (int)targetAspect.y, 24, RenderTextureFormat.ARGB32);
            }
            else
            {
                // Too wide; halve both dimensions to preserve the aspect ratio.
                // Debug.Log("B: " + (int)targetAspect.x / 2 + " x " + (int)targetAspect.y / 2);
                recordingTexture = new RenderTexture((int)targetAspect.x / 2, (int)targetAspect.y / 2, 24, RenderTextureFormat.ARGB32);
            }
        }
        recordingTexture.Create();
// #elif UNITY_ANDROID
//         if (MergeCube.MergeCubeSDK.instance.viewMode == MergeCubeBase.ViewMode.HEADSET)
//         {
//             recordingTexture = MergeCube.MergeCubeSDK.instance.GetHeadsetTexture();
//         }
#endif
    }
    else
    {
#if UNITY_IOS || UNITY_EDITOR
        // Portrait: same sizing rules with the axes swapped.
        if (targetAspect.x <= 1920)
        {
            // Debug.Log("C: " + (int)targetAspect.y + " x " + (int)targetAspect.x);
            recordingTexture = new RenderTexture((int)targetAspect.y, (int)targetAspect.x, 24, RenderTextureFormat.ARGB32);
        }
        else
        {
            // Debug.Log("D: " + (int)targetAspect.y / 2 + " x " + (int)targetAspect.x / 2);
            recordingTexture = new RenderTexture((int)targetAspect.y / 2, (int)targetAspect.x / 2, 24, RenderTextureFormat.ARGB32);
        }
        recordingTexture.Create();
// #elif UNITY_ANDROID
//         recordingTexture = new RenderTexture(432, 768, 24);
#endif
    }

#if UNITY_IOS || UNITY_EDITOR
    Camera.main.targetTexture = recordingTexture;
// #elif UNITY_ANDROID
//     if (MergeCube.MergeCubeSDK.instance.viewMode == MergeCubeBase.ViewMode.HEADSET)
//     {
//         Camera.main.targetTexture = recordingTexture;
//     }
#endif

#if UNITY_IOS
    MergeCubeSDK.instance.videoTexture.SetTexture("_Texture", recordingTexture);
#endif

#if UNITY_IOS || UNITY_EDITOR
    if (MergeCubeSDK.instance.viewMode == MergeCube.MergeCubeBase.ViewMode.FULLSCREEN)
    {
        // Debug.Log("FULLSCREEN!!!!");
        fullscreenRecordingCamera.SetActive(true);
    }
#endif

#if UNITY_IOS
    // Debug.Log("Should have set a recording texture to: " + vidCapManager);
    vidCapManager.SetCustomRenderTexture(Camera.main.targetTexture);
#endif
}
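// The Remap formula noted at the top of the method above, written out as a
// small helper. This is a generic linear-remap sketch added for illustration
// (not part of the original file): it maps 'value' from [low1, high1] into
// [low2, high2].
private static float Remap(float value, float low1, float high1, float low2, float high2)
{
    return low2 + (value - low1) * (high2 - low2) / (high1 - low1);
}

// Example: squeeze a coordinate from a 2436-wide aspect target into a
// 1920-wide budget.
// Remap(1218f, 0f, 2436f, 0f, 1920f) == 960f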