// Native FFmpeg encoder entry point: starts a live-stream capture session and
// returns an IntPtr (presumably an opaque native session handle — confirm against plugin docs).
// NOTE(review): the [DllImport] attribute is expected on the preceding line — not visible in this chunk.
private static extern IntPtr FFmpegEncoder_StartLiveCapture(int width, int height, int rate, ProjectionType proj, StereoMode sm, string streamUrl, string ffpath);
/// <summary>
/// Initializes the projection in mono stereo mode with both eye cameras at the origin.
/// </summary>
public ProjectionBase()
{
    _stereoMode = StereoMode.Mono;
    // Both eyes start co-located until a real eye separation is configured.
    var origin = new Vector3D(0, 0, 0);
    _cameraLeftPosition = origin;
    _cameraRightPosition = origin;
}
/// <summary>
/// Create a new recording with custom settings.
/// </summary>
/// <param name="strFileName">Target file name; must end with ".mp3".</param>
/// <param name="samplerate">Recording/encoding sample rate.</param>
/// <param name="channels">Number of input channels.</param>
/// <param name="bitrate">LAME encoder bitrate.</param>
/// <param name="stereomode">Stereo mode for the LAME encoder.</param>
public PodcasterRecording(string strFileName, int samplerate, int channels, int bitrate, StereoMode stereomode)
{
    // Culture-safe extension check: the previous ToLower()+EndsWith combination
    // misclassifies file names under locales with unusual casing rules (e.g. Turkish "I").
    if (!strFileName.EndsWith(".mp3", StringComparison.OrdinalIgnoreCase))
    {
        // TODO: implement other codecs
        throw new Exception("Currently, Podcaster only works with MP3 files");
    }
    // Start BASS recording in a paused state; RecordAudio receives the sample blocks.
    _myRecProc = new RECORDPROC(RecordAudio);
    _recHandle = Bass.BASS_RecordStart(samplerate, channels, BASSFlag.BASS_RECORD_PAUSE, _myRecProc, IntPtr.Zero);
    // Wire the LAME encoder directly to the recording handle (no input file).
    lame = new EncoderLAME(_recHandle);
    lame.InputFile = null;
    lame.OutputFile = strFileName;
    lame.LAME_Bitrate = bitrate;
    switch (stereomode)
    {
        case StereoMode.JointStereo:
            lame.LAME_Mode = EncoderLAME.LAMEMode.JointStereo;
            break;
        case StereoMode.Mono:
            lame.LAME_Mode = EncoderLAME.LAMEMode.Mono;
            break;
        default:
            // Any other requested mode falls back to plain stereo.
            lame.LAME_Mode = EncoderLAME.LAMEMode.Stereo;
            break;
    }
    lame.LAME_TargetSampleRate = samplerate;
    lame.LAME_Quality = EncoderLAME.LAMEQuality.Quality;
}
// Per-frame refresh: keeps the stereo layer, effective colour and renderer
// texture in sync with the current field values.
void Update()
{
    // Move the object onto the correct eye layer when the stereo mode changes.
    if (this.stereoMode != this.prevStereoMode)
    {
        this.gameObject.layer = ToolbeltManager.FirstInstance.GetStereoLayer(this.stereoMode);
        this.prevStereoMode = this.stereoMode;
    }
    // Recompute the effective colour when either colour input changed this frame.
    if (prevIcColour != icColour || prevMultiplyByParentColour != multiplyByParentColour)
    {
        UpdateEffectiveColour();
    }
    if (this.renderer != this.prevRenderer) //when renderer loads, make sure to pass colour in.
    {
        this.prevRenderer = this.renderer;
        UpdateEffectiveColour();
    }
    // Subclass hook — runs before the texture is pushed to the renderer.
    DoUpdate();
    // We don't always want to create a MeshRenderer if it doesn't exist,
    // as icMaterial on empty GameObject can be used purely for colour parenting
    if (this.renderer)
    {
        this.renderer.material.mainTexture = this.GetRawTexture();
    }
    // Do any render passes after the icMaterial has updated its own stuff
    foreach (icRenderPass rp in this.renderPasses)
    {
        rp.RenderPass();
    }
}
// Native FFmpeg encoder entry point: starts a screenshot capture session and
// returns an IntPtr (presumably an opaque native session handle — confirm against plugin docs).
// NOTE(review): the [DllImport] attribute is expected on the preceding line — not visible in this chunk.
private static extern IntPtr FFmpegEncoder_StartScreenshot(
    int width, int height,
    bool verticalFlip, bool horizontalFlip,
    ProjectionType projectionType, StereoMode stereoMode,
    string imagePath, string ffmpegPath);
/// <summary>
/// Applies a stereoscopic-mode parameter (mode, decimate, swap-eyes) to an MMAL port.
/// </summary>
/// <param name="port">The MMAL port to configure.</param>
/// <param name="mode">The stereo configuration to apply.</param>
internal static unsafe void SetStereoMode(this IPort port, StereoMode mode)
{
    // Build the native parameter struct; the header carries the parameter id
    // (MmalParameterStereoscopicMode) and the marshalled struct size.
    var stereo = new MmalParameterStereoscopicModeType(
        new MmalParameterHeaderType(MmalParameterStereoscopicMode, Marshal.SizeOf<MmalParameterStereoscopicModeType>()),
        mode.Mode, mode.Decimate, mode.SwapEyes);
    // MmalCheck raises on a non-success native status with the given message.
    MmalCheck(MmalPort.SetParameter(port.Ptr, &stereo.Hdr), "Unable to set Stereo mode");
}
// Native GPU encoder entry point: configures VOD (file) capture settings and
// returns the encoder's status code.
// NOTE(review): the [DllImport] attribute is expected on the preceding line — not visible in this chunk.
private static extern EncoderStatus GPUEncoder_SetVodCaptureSettings(
    int width, int height, int frameRate, int bitRate,
    string fullSavePath, bool is360,
    bool verticalFlip, bool horizontalFlip,
    ProjectionType projectionType, StereoMode stereoMode);
// Native FFmpeg encoder entry point: starts a live capture session and returns
// an IntPtr (presumably an opaque native session handle — confirm against plugin docs).
// NOTE(review): the [DllImport] attribute is expected on the preceding line — not visible in this chunk.
private static extern IntPtr FFmpegEncoder_StartLiveCapture(
    int width, int height, int bitrate, int frameRate,
    bool verticalFlip, bool horizontalFlip,
    ProjectionType projectionType, StereoMode stereoMode,
    string videoPath, string ffmpegPath);
/// <summary>
/// Toggles this camera between the left and right eye by swapping the
/// corresponding stereo layer in its culling mask.
/// </summary>
void SwitchEyes()
{
    ToolbeltManager tm = ToolbeltManager.FirstInstance;
    // Drop the current eye's layer from the culling mask.
    this.camera.cullingMask &= ~(1 << tm.GetStereoLayer(this.curStereo));
    // Flip to the opposite eye.
    this.curStereo = (this.curStereo == StereoMode.LEFT) ? StereoMode.RIGHT : StereoMode.LEFT;
    // Add the new eye's layer to the culling mask.
    this.camera.cullingMask |= (1 << tm.GetStereoLayer(this.curStereo));
}
// Native GPU encoder entry point: configures live-stream capture settings
// (including flush cycle timing) and returns the encoder's status code.
// NOTE(review): the [DllImport] attribute is expected on the preceding line — not visible in this chunk.
private static extern EncoderStatus GPUEncoder_SetLiveCaptureSettings(
    int width, int height, int frameRate, int bitRate,
    float flushCycleStart, float flushCycleAfter,
    string streamUrl, bool is360,
    bool verticalFlip, bool horizontalFlip,
    ProjectionType projectionType, StereoMode stereoMode);
/// <summary>
/// Routes a GameObject to the given stereo eye: through its icMaterial when one
/// is attached, otherwise by moving the object onto the matching stereo layer.
/// </summary>
void SetStereoMode(GameObject go, StereoMode sm)
{
    icMaterial material = go.GetComponent<icMaterial>();
    if (!material)
    {
        // No icMaterial component — assign the eye layer directly.
        go.layer = ToolbeltManager.FirstInstance.GetStereoLayer(sm);
        return;
    }
    material.stereoMode = sm;
    // Always true so we can adjust colour from the stereo parent
    material.multiplyByParentColour = true;
}
/// <summary>
/// Maps a stereo eye to its configured layer index; any non-eye mode maps to layer 0.
/// </summary>
public int GetStereoLayer(StereoMode stereoMode)
{
    if (stereoMode == StereoMode.LEFT)
    {
        return this.leftEyeLayer;
    }
    if (stereoMode == StereoMode.RIGHT)
    {
        return this.rightEyeLayer;
    }
    return 0;
}
/// <summary>
/// Toggles the viewer between Wiggle and CrossedEyes stereo modes, rebuilds the
/// matching VBO data, then refreshes the VBOs and the picture layout.
/// </summary>
private void btnSwitch_Click(object sender, EventArgs e)
{
    bool leavingWiggle = curMode == StereoMode.Wiggle;
    if (leavingWiggle)
    {
        curMode = StereoMode.CrossedEyes;
        CreateCrossedEyesVBOData();
    }
    else
    {
        curMode = StereoMode.Wiggle;
        CreateAnimated3DVBOData();
    }
    UpdateVBOs();
    // Re-run the resize handler so the picture adapts to the new mode.
    frmPicture_Resize(sender, e);
}
/// <summary>
/// Assigns a stereo eye to a GameObject, preferring its icMaterial component
/// and falling back to a direct layer assignment.
/// </summary>
void SetStereoMode(GameObject go, StereoMode sm)
{
    var mat = go.GetComponent<icMaterial>();
    if (mat)
    {
        mat.stereoMode = sm;
        // Always on so the stereo parent can adjust this object's colour.
        mat.multiplyByParentColour = true;
    }
    else
    {
        int eyeLayer = ToolbeltManager.FirstInstance.GetStereoLayer(sm);
        go.layer = eyeLayer;
    }
}
///// <summary>
///// The resolution of the grid used to store the light probes in the frustum
///// </summary>
//public Vector3Int lightProbesProxyGridResolution = new Vector3Int(16, 9, 16);

#endregion

#region Function

/// <summary>
/// Returns the frustum grid resolution, taking into account the camera stereo mode.
/// </summary>
/// <param name="camera">The reference camera to look for stereo mode</param>
/// <returns>The frustum grid resolution, resized according to the stereo mode of the camera</returns>
public Vector3Int GetFrustumGridResolution(Camera camera)
{
    StereoMode cameraStereoMode = camera.GetCameraStereoMode();
    Vector3Int resolution = frustumGridResolution;
    if (enableAutomaticStereoResizing)
    {
        // Width is halved for multi-pass and doubled for single-pass stereo —
        // presumably compensating for the per-eye render width; confirm.
        if (cameraStereoMode == StereoMode.MultiPass)
        {
            resolution.x /= 2;
        }
        else if (cameraStereoMode == StereoMode.SinglePass)
        {
            resolution.x *= 2;
        }
    }
    return(resolution);
}
/// <summary>
/// Highlights the stereo-layout button matching <paramref name="mode"/> and
/// disables the other two; unknown modes leave the buttons untouched.
/// </summary>
private void SwitchVideoStereoButton(StereoMode mode)
{
    if (mode == StereoMode.NONE)
    {
        monoButton.SetEnable();
        leftRightButton.SetDisable();
        topBottomButton.SetDisable();
    }
    else if (mode == StereoMode.LEFT_RIGHT)
    {
        monoButton.SetDisable();
        leftRightButton.SetEnable();
        topBottomButton.SetDisable();
    }
    else if (mode == StereoMode.TOP_BOTTOM)
    {
        monoButton.SetDisable();
        leftRightButton.SetDisable();
        topBottomButton.SetEnable();
    }
}
/// <summary>
/// Handles a change of the delay type.
/// </summary>
/// <param name="value">Normalised new value of the parameter.</param>
private void SetMode(float value)
{
    var requestedMode = Converters.ToDelayMode(value);
    if (requestedMode == mode)
    {
        // Nothing to do — the mode is unchanged.
        return;
    }
    mode = requestedMode;
    switch (mode)
    {
        case StereoMode.StereoOffset:
            ChangeDelay(stereoOffsetDelay);
            break;
        case StereoMode.VariousTime:
            ChangeDelay(variousTimeDelay);
            break;
        case StereoMode.PingPong:
            ChangeDelay(pingPongDelay);
            break;
    }
}
// Set video stereo mode.
public void SetStereoMode(StereoMode sm)
{
    // Store the new mode first — SwitchStereoRenderer() presumably reads it; confirm.
    stereoMode = sm;
    SwitchStereoRenderer();
}
/// <summary>A string extension method that attempts to parse a StereoMode from the given string.</summary>
/// <param name="source">The source string.</param>
/// <param name="mode">The parsed stereo mode.</param>
/// <returns>True if it succeeds, false if it fails.</returns>
public static bool TryGetStereoMode(this string source, out StereoMode mode) =>
    StereoModes.TryGetValue(source, out mode);
// Native FFmpeg encoder entry point: starts a screenshot session and returns
// an IntPtr (presumably an opaque native session handle — confirm against plugin docs).
// NOTE(review): the [DllImport] attribute is expected on the preceding line — not visible in this chunk.
private static extern IntPtr FFmpegEncoder_StartScreenshot(int width, int height, ProjectionType proj, StereoMode sm, string path, string ffpath);
/// <summary>
/// Creates one camera per projection slice covered by this projector's mesh.
/// Seam positions are fractions of a full turn in [0, 1); the projector span may
/// wrap around the 0 seam, which is handled by shifting wrapped seams by +1.
/// </summary>
/// <param name="sliceSeams">Normalised seam positions delimiting the slices.</param>
/// <param name="parent">Parent GameObject for the created slice cameras.</param>
/// <param name="stereoCameraInterface">Interface handed to each camera's eye callback.</param>
/// <param name="copySliceCameraFrom">Camera whose settings each slice camera clones.</param>
/// <returns>The accumulated list of slice camera descriptions.</returns>
public List<SliceCameraDescription> CreateSliceCameras(float[] sliceSeams, GameObject parent, StereoCameraInterface stereoCameraInterface, Camera copySliceCameraFrom)
{
    int numSlices; // number of slices visible to this projector
    int firstSeam = -1;
    int lastSeam = -1;
    // Wrap the mesh span into [0, 1) turns.
    float projStart = projectorMesh.MeshStart % 1.0f;
    float projEnd = projectorMesh.MeshEnd % 1.0f;
    // float projWidth = (projectorMesh.MeshStart > projectorMesh.MeshEnd)
    //     ? (1.0f + projectorMesh.MeshEnd - projectorMesh.MeshStart)
    //     : (projectorMesh.MeshEnd - projectorMesh.MeshStart);
    float[] contextSeams = new float[sliceSeams.Length]; // create a local version of slice seams that may be modified
    sliceSeams.CopyTo(contextSeams, 0);
    // Locate the seams bracketing each edge of the projector span.
    for (int i = 0; i < contextSeams.Length; i++)
    {
        float testLeftSeam = contextSeams[i];
        // The 0.0 seam must also act as 1.0 if it is at the end.
        float testRightSeam = ((i + 1) == contextSeams.Length) ? 1.0f : contextSeams[i + 1];
        if (testLeftSeam <= projStart && testRightSeam > projStart)
        {
            // Left Projector edge is between these two seams
            firstSeam = i;
        }
        else if (testLeftSeam < projEnd && testRightSeam >= projEnd)
        {
            // Right Projector edge is between these two seams
            lastSeam = (i + 1) % contextSeams.Length;
        }
    }
    if (firstSeam == -1 || lastSeam == -1)
    {
        // For some reason, couldn't find seams
        throw new Exception("Couldn't find slice seams on either side of the projector");
    }
    if (lastSeam > firstSeam)
    {
        numSlices = (lastSeam - firstSeam) % contextSeams.Length;
    }
    else
    {
        // this projector wraps around the 0 seam — shift the wrapped seams up by
        // one full turn so the per-slice loop below can index them monotonically.
        numSlices = (lastSeam + (contextSeams.Length - firstSeam));
        for (int i = 0; i < contextSeams.Length; i++)
        {
            if (i >= 0 && i <= lastSeam)
            {
                contextSeams[i] += 1.0f;
            }
        }
        if (projEnd < projStart)
        {
            projEnd += 1.0f;
        }
    }
    // Make a camera per slice (clone the main camera's position and orientation)
    for (int i = 0; i < numSlices; i++)
    {
        float leftSeam = contextSeams[(firstSeam + i) % contextSeams.Length];
        float rightSeam = contextSeams[(firstSeam + i + 1) % contextSeams.Length];
        // Midpoint of the slice in turns; +1.0f handles a slice straddling the 0 seam.
        float decAngle = (rightSeam < leftSeam) ? (leftSeam + rightSeam + 1.0f) / 2.0f : (leftSeam + rightSeam) / 2.0f;
        float angle = decAngle * 360.0f;
        SliceCameraDescription sliceCamera = new SliceCameraDescription(i, angle, leftSeam, rightSeam, this, parent, copySliceCameraFrom);
        sliceCameras.Add(sliceCamera);
        // Add eye callback script to slice camera
        CameraEyeCallback callbackScript = sliceCamera.cameraObject.AddComponent<CameraEyeCallback>();
        callbackScript.stereoCameraInterface = stereoCameraInterface;
        StereoMode stereoMode = this.stereoType == ProjectorStereoType.Left ? StereoMode.LEFT : StereoMode.RIGHT;
        callbackScript.stereoMode = stereoMode;
        // Cull the opposite eye's layer so each camera only renders its own eye.
        Camera camera = sliceCamera.cameraObject.camera;
        StereoMode oppositeStereo = stereoMode == StereoMode.LEFT ? StereoMode.RIGHT : StereoMode.LEFT;
        camera.cullingMask = camera.cullingMask & ~(1 << ToolbeltManager.FirstInstance.GetStereoLayer(oppositeStereo));
    }
    return(sliceCameras);
}
/// <summary>
/// Initialize the attributes of the capture session and start capture.
/// </summary>
/// <returns>True when the screenshot session starts; false on any validation or encoder failure.</returns>
public bool StartCapture()
{
    // Reject re-entry while a previous screenshot session is still running.
    if (_captureStarted)
    {
        Debug.LogWarningFormat(LOG_FORMAT, "Previous screenshot session not finish yet!");
        OnError(this, CaptureErrorCode.SCREENSHOT_ALREADY_IN_PROGRESS);
        return(false);
    }
    // Sync the save folder with the global config (either direction).
    if (string.IsNullOrEmpty(saveFolder))
    {
        saveFolder = Config.saveFolder;
    }
    else
    {
        Config.saveFolder = saveFolder;
    }
    if (captureMode == CaptureMode._360)
    {
        if (projectionType == ProjectionType.NONE)
        {
            Debug.LogFormat(LOG_FORMAT, "Projection type should be set for 360 capture, set type to equirect for generating texture properly");
            projectionType = ProjectionType.EQUIRECT;
        }
        if (projectionType == ProjectionType.CUBEMAP)
        {
            if (stereoMode != StereoMode.NONE)
            {
                Debug.LogFormat(LOG_FORMAT, "Stereo settings not support for cubemap capture, reset to mono video capture.");
                stereoMode = StereoMode.NONE;
            }
        }
        CubemapSizeSettings();
    }
    else if (captureMode == CaptureMode.REGULAR)
    {
        // Non 360 capture doesn't have projection type
        projectionType = ProjectionType.NONE;
    }
    AntiAliasingSettings();
#if UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
    // Choose GPU encoding only when licensed, allowed and supported on this device.
    if (Config.isFreeTrial())
    {
        Debug.LogFormat(LOG_FORMAT, "GPU encoding is not supported in free trial version, fall back to software encoding.");
        hardwareEncoding = false;
    }
    else if (!softwareEncodingOnly && gpuEncoder.instantiated && gpuEncoder.IsSupported())
    {
        hardwareEncoding = true;
    }
    else
    {
        Debug.LogFormat(LOG_FORMAT, "GPU encoding is not supported in this device, fall back to software encoding.");
    }
#endif
#if UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX
    Debug.LogFormat(LOG_FORMAT, "GPU encoding is not supported on macOS system, fall back to software encoding.");
    hardwareEncoding = false;
#endif
    if (hardwareEncoding)
    {
        // init hardware encoding settings
        GPUEncoderSettings();
        if (!gpuEncoder.StartScreenShot())
        {
            OnError(this, CaptureErrorCode.SCREENSHOT_START_FAILED);
            return(false);
        }
    }
    else
    {
        // init ffmpeg encoding settings
        FFmpegEncoderSettings();
        if (!ffmpegEncoder.StartScreenShot())
        {
            OnError(this, CaptureErrorCode.SCREENSHOT_START_FAILED);
            return(false);
        }
    }
    _captureStarted = true;
    // Start garbage collect thread.
    if (!garbageThreadRunning)
    {
        garbageThreadRunning = true;
        if (garbageCollectionThread != null && garbageCollectionThread.IsAlive)
        {
            // Stop a stale collector before spawning a fresh one.
            garbageCollectionThread.Abort();
            garbageCollectionThread = null;
        }
        garbageCollectionThread = new Thread(GarbageCollectionProcess);
        garbageCollectionThread.Priority = System.Threading.ThreadPriority.Lowest;
        garbageCollectionThread.IsBackground = true;
        garbageCollectionThread.Start();
    }
    Debug.LogFormat(LOG_FORMAT, "Screen shot session started.");
    return(true);
}
/// <summary>
/// Initialize the attributes of the capture session and start capture.
/// </summary>
/// <returns>True when the video capture session starts; false on any validation or encoder failure.</returns>
public bool StartCapture()
{
    // Reject re-entry while a previous capture session is still running.
    if (status != CaptureStatus.READY)
    {
        Debug.LogWarningFormat(LOG_FORMAT, "Previous video capture session not finish yet!");
        OnError(this, CaptureErrorCode.VIDEO_CAPTURE_ALREADY_IN_PROGRESS);
        return(false);
    }
    if (!File.Exists(Config.ffmpegPath))
    {
        Debug.LogErrorFormat(LOG_FORMAT, "FFmpeg not found, please follow document and add ffmpeg executable before start capture!");
        OnError(this, CaptureErrorCode.FFMPEG_NOT_FOUND);
        return(false);
    }
    // Sync the save folder with the global config (either direction).
    if (string.IsNullOrEmpty(saveFolder))
    {
        saveFolder = Config.saveFolder;
    }
    else
    {
        Config.saveFolder = saveFolder;
    }
    if (captureMode == CaptureMode._360)
    {
        if (projectionType == ProjectionType.NONE)
        {
            Debug.LogFormat(LOG_FORMAT, "Projection type should be set for 360 capture, set type to equirect for generating texture properly");
            projectionType = ProjectionType.EQUIRECT;
        }
        if (projectionType == ProjectionType.CUBEMAP)
        {
            if (stereoMode != StereoMode.NONE)
            {
                Debug.LogFormat(LOG_FORMAT, "Stereo settings not support for cubemap capture, reset to mono video capture.");
                stereoMode = StereoMode.NONE;
            }
        }
        CubemapSizeSettings();
    }
    else if (captureMode == CaptureMode.REGULAR)
    {
        // Non 360 capture doesn't have projection type
        projectionType = ProjectionType.NONE;
    }
    // Clamp the frame rate into the supported [18, 120] range.
    if (frameRate < 18)
    {
        frameRate = 18;
        Debug.LogFormat(LOG_FORMAT, "Minimum frame rate is 18, set frame rate to 18.");
    }
    if (frameRate > 120)
    {
        frameRate = 120;
        Debug.LogFormat(LOG_FORMAT, "Maximum frame rate is 120, set frame rate to 120.");
    }
    AntiAliasingSettings();
    if (captureAudio && offlineRender)
    {
        Debug.LogFormat(LOG_FORMAT, "Audio capture not supported in offline render mode, disable audio capture!");
        captureAudio = false;
    }
#if UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
    // Choose GPU encoding only when licensed, allowed and supported on this device.
    if (Config.isFreeTrial())
    {
        Debug.LogFormat(LOG_FORMAT, "GPU encoding is not supported in free trial version, fall back to software encoding.");
        hardwareEncoding = false;
    }
    else if (!softwareEncodingOnly && gpuEncoder.instantiated && gpuEncoder.IsSupported())
    {
        hardwareEncoding = true;
    }
    else
    {
        Debug.LogFormat(LOG_FORMAT, "GPU encoding is not supported in this device, fall back to software encoding.");
    }
#endif
#if UNITY_STANDALONE_OSX || UNITY_EDITOR_OSX
    Debug.LogFormat(LOG_FORMAT, "GPU encoding is not supported on macOS system, fall back to software encoding.");
    hardwareEncoding = false;
#endif
    // Init ffmpeg audio capture
    if (!hardwareEncoding && captureAudio && !FFmpegMuxer.singleton)
    {
        AudioListener listener = FindObjectOfType<AudioListener>();
        if (!listener)
        {
            Debug.LogFormat(LOG_FORMAT, "AudioListener not found, disable audio capture!");
            captureAudio = false;
        }
        else
        {
            // Attach the muxer next to the scene's audio listener.
            listener.gameObject.AddComponent<FFmpegMuxer>();
        }
    }
    if (hardwareEncoding)
    {
        // init GPU encoding settings
        GPUEncoderSettings();
        if (!gpuEncoder.StartCapture())
        {
            OnError(this, CaptureErrorCode.VIDEO_CAPTURE_START_FAILED);
            return(false);
        }
    }
    else
    {
        // init ffmpeg encoding settings
        FFmpegEncoderSettings();
        if (!ffmpegEncoder.StartCapture())
        {
            OnError(this, CaptureErrorCode.VIDEO_CAPTURE_START_FAILED);
            return(false);
        }
        if (captureAudio)
        {
            // start ffmpeg audio encoding
            if (!FFmpegMuxer.singleton.captureStarted)
            {
                FFmpegMuxer.singleton.StartCapture();
            }
            FFmpegMuxer.singleton.AttachVideoCapture(this);
        }
    }
    // Update current status.
    status = CaptureStatus.STARTED;
    // Start garbage collect thread.
    if (!garbageThreadRunning)
    {
        garbageThreadRunning = true;
        if (garbageCollectionThread != null && garbageCollectionThread.IsAlive)
        {
            // Stop a stale collector before spawning a fresh one.
            garbageCollectionThread.Abort();
            garbageCollectionThread = null;
        }
        garbageCollectionThread = new Thread(GarbageCollectionProcess);
        garbageCollectionThread.Priority = System.Threading.ThreadPriority.Lowest;
        garbageCollectionThread.IsBackground = true;
        garbageCollectionThread.Start();
    }
    if (offlineRender)
    {
        // Backup maximumDeltaTime states.
        originalMaximumDeltaTime = Time.maximumDeltaTime;
        // Pin delta time to the fixed timestep — presumably so offline rendering
        // steps in lockstep with capture; confirm.
        Time.maximumDeltaTime = Time.fixedDeltaTime;
    }
    Debug.LogFormat(LOG_FORMAT, "Video capture session started.");
    return(true);
}
/// <summary>
/// Records the currently rendered eye locally and mirrors it into the global toolbelt manager.
/// </summary>
public void SetCurrentEye (StereoMode eye)
{
    this._currentEye = eye;
    ToolbeltManager.FirstInstance.SetCurrentEye (eye);
}
/// <summary>
/// Validates shared settings, copies them onto every managed VideoCapture, and
/// starts them all (multi-capture supports VOD only).
/// </summary>
/// <returns>True when all captures were started; false when validation fails.</returns>
public bool StartCapture()
{
    if (captureStarted)
    {
        Debug.LogWarningFormat(LOG_FORMAT, "Previous video capture manager session not finish yet!");
        return(false);
    }
    // check all video capture is ready
    bool allReady = true;
    foreach (VideoCapture videoCapture in videoCaptures)
    {
        if (videoCapture.status != CaptureStatus.READY)
        {
            allReady = false;
            break;
        }
    }
    if (!allReady)
    {
        Debug.LogWarningFormat(LOG_FORMAT, "There is one or more video capture session still in progress!");
        return(false);
    }
    if (!FFmpegConfig.IsExist())
    {
        Debug.LogErrorFormat(LOG_FORMAT, "FFmpeg not found, please follow document and add ffmpeg executable before start capture!");
        return(false);
    }
    saveFolder = Utils.CreateFolder(saveFolder);
    if (captureMode == CaptureMode._360)
    {
        if (projectionType == ProjectionType.NONE)
        {
            Debug.LogFormat(LOG_FORMAT, "Projection type should be set for 360 capture, set type to equirect for generating texture properly");
            projectionType = ProjectionType.EQUIRECT;
        }
        if (projectionType == ProjectionType.CUBEMAP)
        {
            if (stereoMode != StereoMode.NONE)
            {
                Debug.LogFormat(LOG_FORMAT, "Stereo settings not support for cubemap capture, reset to mono video capture.");
                stereoMode = StereoMode.NONE;
            }
        }
    }
    else if (captureMode == CaptureMode.REGULAR)
    {
        // Non 360 capture doesn't have projection type
        projectionType = ProjectionType.NONE;
    }
    // start capture for all video capture
    foreach (VideoCapture videoCapture in videoCaptures)
    {
        // video capture settings — mirror every manager-level setting onto the child.
        videoCapture.startOnAwake = startOnAwake;
        videoCapture.captureTime = captureTime;
        videoCapture.quitAfterCapture = quitAfterCapture;
        videoCapture.captureMode = captureMode;
        videoCapture.projectionType = projectionType;
        // only VOD supported in multi capture
        videoCapture.captureType = CaptureType.VOD;
        videoCapture.saveFolder = saveFolder;
        videoCapture.resolutionPreset = resolutionPreset;
        videoCapture.frameWidth = frameWidth;
        videoCapture.frameHeight = frameHeight;
        videoCapture.frameRate = frameRate;
        videoCapture.bitrate = bitrate;
        videoCapture.stereoMode = stereoMode;
        videoCapture.interpupillaryDistance = interpupillaryDistance;
        videoCapture.cubemapFaceSize = cubemapFaceSize;
        videoCapture.offlineRender = offlineRender;
        videoCapture.captureAudio = captureAudio;
        videoCapture.captureMicrophone = captureMicrophone;
        videoCapture.deviceIndex = deviceIndex;
        videoCapture.antiAliasing = antiAliasing;
        videoCapture.gpuEncoding = gpuEncoding;
        videoCapture.encoderPreset = encoderPreset;
        videoCapture.StartCapture();
    }
    captureStarted = true;
    return(true);
}
/// <summary>
/// Applies the stereo layout to the video player and updates the stereo buttons
/// to reflect the selection.
/// </summary>
public void SetVideoStereoMode(StereoMode mode)
{
    vrVideoPlayer.SetStereoMode(mode);
    SwitchVideoStereoButton(mode);
}
/// <summary>
/// Turns the stereo off, first remembering its mode and volume — presumably so
/// a companion undo/restore operation can reinstate them; confirm.
/// </summary>
public void Execute()
{
    _previousMode = _stereo.Mode;
    _previousVolume = _stereo.Volume;
    _stereo.Off();
}
/// <summary>Records which eye is currently active.</summary>
public void SetCurrentEye(StereoMode stereoMode)
{
    this.currentEye = stereoMode;
}
/// <summary>A string extension method that attempts to parse a StereoMode from the given string.</summary>
/// <param name="source">The source string.</param>
/// <param name="mode">The parsed stereo mode.</param>
/// <returns>True if it succeeds, false if it fails.</returns>
public static bool TryGetStereoMode(this string source, out StereoMode mode)
{
    return StereoModes.TryGetValue(source, out mode);
}
/// <summary>Attempts to map a stereoscopic layout string to its StereoMode.</summary>
/// <param name="layout">The layout name to look up.</param>
/// <param name="result">The matched stereo mode, when found.</param>
/// <returns>True when the layout is recognised.</returns>
private static bool TryGetStereoscopic(string layout, out StereoMode result) =>
    StereoModes.TryGetValue(layout, out result);
/// <summary>
/// Initialize the attributes of the capture session and start capture.
/// </summary>
/// <returns>True when the screenshot session starts; false on any validation or encoder failure.</returns>
public bool StartCapture()
{
    // Reject re-entry while a previous screenshot session is still running.
    if (_captureStarted)
    {
        Debug.LogWarningFormat(LOG_FORMAT, "Previous screenshot session not finish yet!");
        OnCaptureError(new CaptureErrorEventArgs(CaptureErrorCode.SCREENSHOT_ALREADY_IN_PROGRESS));
        return(false);
    }
    saveFolderFullPath = Utils.CreateFolder(saveFolder);
    if (captureMode == CaptureMode._360)
    {
        if (projectionType == ProjectionType.NONE)
        {
            Debug.LogFormat(LOG_FORMAT, "Projection type should be set for 360 capture, set type to equirect for generating texture properly");
            projectionType = ProjectionType.EQUIRECT;
        }
        if (projectionType == ProjectionType.CUBEMAP)
        {
            if (stereoMode != StereoMode.NONE)
            {
                Debug.LogFormat(LOG_FORMAT, "Stereo settings not support for cubemap capture, reset to mono video capture.");
                stereoMode = StereoMode.NONE;
            }
        }
        CubemapSizeSettings();
    }
    else if (captureMode == CaptureMode.REGULAR)
    {
        // Non 360 capture doesn't have projection type
        projectionType = ProjectionType.NONE;
    }
    AntiAliasingSettings();
    // init ffmpeg encoding settings
    FFmpegEncoderSettings();
#if UNITY_STANDALONE_WIN || UNITY_EDITOR_WIN
    if (gpuEncoding)
    {
        if (FreeTrial.Check())
        {
            Debug.LogFormat(LOG_FORMAT, "GPU encoding is not supported in free trial version, fall back to software encoding.");
            gpuEncoding = false;
        }
        // init GPU encoding settings
        GPUEncoderSettings();
        if (!gpuEncoder.instantiated || !gpuEncoder.IsSupported())
        {
            Debug.LogFormat(LOG_FORMAT, "GPU encoding is not supported in current device or settings, fall back to software encoding.");
            gpuEncoding = false;
        }
    }
#else
    if (gpuEncoding)
    {
        Debug.LogFormat(LOG_FORMAT, "GPU encoding is only available on windows system, fall back to software encoding.");
        gpuEncoding = false;
    }
#endif
    if (gpuEncoding)
    {
        // init hardware encoding settings
        GPUEncoderSettings();
        if (!gpuEncoder.StartScreenShot())
        {
            OnCaptureError(new CaptureErrorEventArgs(CaptureErrorCode.SCREENSHOT_START_FAILED));
            return(false);
        }
    }
    else
    {
        if (!ffmpegEncoder.StartScreenShot())
        {
            OnCaptureError(new CaptureErrorEventArgs(CaptureErrorCode.SCREENSHOT_START_FAILED));
            return(false);
        }
    }
    _captureStarted = true;
    // Start garbage collect thread. (intentionally left disabled in this version)
    //if (!garbageThreadRunning)
    //{
    //    garbageThreadRunning = true;
    //    if (garbageCollectionThread != null &&
    //        garbageCollectionThread.IsAlive)
    //    {
    //        garbageCollectionThread.Abort();
    //        garbageCollectionThread = null;
    //    }
    //    garbageCollectionThread = new Thread(GarbageCollectionProcess);
    //    garbageCollectionThread.Priority = System.Threading.ThreadPriority.Lowest;
    //    garbageCollectionThread.IsBackground = true;
    //    garbageCollectionThread.Start();
    //}
    Debug.LogFormat(LOG_FORMAT, "Screen shot session started.");
    return(true);
}
/// <summary>
/// Prepare capture settings: validates preconditions, normalises the capture
/// source / mode / stereo combination, clamps frame rate and sizes, and caches
/// the paths used by the encoders.
/// </summary>
/// <returns>True when the settings are valid and capture may start.</returns>
protected bool PrepareCapture()
{
    if (status != CaptureStatus.READY)
    {
        Debug.LogWarningFormat(LOG_FORMAT, "Previous capture session not finish yet!");
        OnCaptureError(new CaptureErrorEventArgs(CaptureErrorCode.CAPTURE_ALREADY_IN_PROGRESS));
        return(false);
    }
    if (!FFmpegConfig.IsExist())
    {
        Debug.LogErrorFormat(LOG_FORMAT, "FFmpeg not found, please follow document and add ffmpeg executable before start capture!");
        OnCaptureError(new CaptureErrorEventArgs(CaptureErrorCode.FFMPEG_NOT_FOUND));
        return(false);
    }
    if (captureSource == CaptureSource.RENDERTEXTURE)
    {
        if (inputTexture == null)
        {
            Debug.LogErrorFormat(LOG_FORMAT, "Input render texture not found, please attach input render texture!");
            OnCaptureError(new CaptureErrorEventArgs(CaptureErrorCode.INPUT_TEXTURE_NOT_FOUND));
            return(false);
        }
        // Render-texture capture only supports flat, mono output.
        if (captureMode != CaptureMode.REGULAR)
        {
            Debug.LogFormat(LOG_FORMAT, "Capture from render texture only support REGULAR CaptureMode");
            captureMode = CaptureMode.REGULAR;
            projectionType = ProjectionType.NONE;
        }
        if (stereoMode != StereoMode.NONE)
        {
            Debug.LogFormat(LOG_FORMAT, "Capture from render texture only support NONE StereoMode");
            stereoMode = StereoMode.NONE;
        }
        // Output size follows the input texture.
        frameWidth = inputTexture.width;
        frameHeight = inputTexture.height;
    }
    else if (captureSource == CaptureSource.SCREEN)
    {
        // Screen capture only supports flat, mono output at screen size.
        if (captureMode != CaptureMode.REGULAR)
        {
            Debug.LogFormat(LOG_FORMAT, "Capture from screen only support REGULAR CaptureMode");
            captureMode = CaptureMode.REGULAR;
            projectionType = ProjectionType.NONE;
        }
        if (stereoMode != StereoMode.NONE)
        {
            Debug.LogFormat(LOG_FORMAT, "Capture from screen only support NONE StereoMode");
            stereoMode = StereoMode.NONE;
        }
        if (captureCursor)
        {
            // Software cursor so it shows up in the captured frames.
            Cursor.SetCursor(cursorImage, Vector2.zero, CursorMode.ForceSoftware);
        }
        frameWidth = Screen.width;
        frameHeight = Screen.height;
    }
    else
    {
        ResolutionPresetSettings();
    }
    // Some codec cannot handle odd video size
    frameWidth = Utils.GetClosestEvenNumber(frameWidth);
    frameHeight = Utils.GetClosestEvenNumber(frameHeight);
    if (captureType == CaptureType.LIVE)
    {
        if (string.IsNullOrEmpty(liveStreamUrl))
        {
            Debug.LogWarningFormat(LOG_FORMAT, "Please input a valid live streaming url.");
            OnCaptureError(new CaptureErrorEventArgs(CaptureErrorCode.INVALID_STREAM_URI));
            return(false);
        }
    }
    if (captureMode == CaptureMode._360)
    {
        if (projectionType == ProjectionType.NONE)
        {
            Debug.LogFormat(LOG_FORMAT, "Projection type should be set for 360 capture, set type to equirect for generating texture properly");
            projectionType = ProjectionType.EQUIRECT;
        }
        if (projectionType == ProjectionType.CUBEMAP)
        {
            if (stereoMode != StereoMode.NONE)
            {
                Debug.LogFormat(LOG_FORMAT, "Stereo settings not support for cubemap capture, reset to mono video capture.");
                stereoMode = StereoMode.NONE;
            }
        }
        CubemapSizeSettings();
    }
    else if (captureMode == CaptureMode.REGULAR)
    {
        // Non 360 capture doesn't have projection type
        projectionType = ProjectionType.NONE;
    }
    // Clamp the frame rate into the supported [18, 120] range.
    if (frameRate < 18)
    {
        frameRate = 18;
        Debug.LogFormat(LOG_FORMAT, "Minimum frame rate is 18, set frame rate to 18.");
    }
    if (frameRate > 120)
    {
        frameRate = 120;
        Debug.LogFormat(LOG_FORMAT, "Maximum frame rate is 120, set frame rate to 120.");
    }
    AntiAliasingSettings();
    if (captureAudio && offlineRender)
    {
        Debug.LogFormat(LOG_FORMAT, "Audio capture not supported in offline render mode, disable audio capture!");
        captureAudio = false;
    }
    // Save camera settings
    SaveCameraSettings();
    if (transparent)
    {
        TransparentCameraSettings();
    }
    // Cache resolved paths and reset the last-output marker.
    ffmpegFullPath = FFmpegConfig.path;
    saveFolderFullPath = Utils.CreateFolder(saveFolder);
    lastVideoFile = "";
    return(true);
}
/// <summary>
/// Resolves the layer index assigned to the given eye; non-eye modes resolve to layer 0.
/// </summary>
public int GetStereoLayer(StereoMode stereoMode)
{
    return stereoMode == StereoMode.LEFT ? this.leftEyeLayer
         : stereoMode == StereoMode.RIGHT ? this.rightEyeLayer
         : 0;
}
/// <summary>
/// Parameterised demo test: loads a 3D sample file through MediaInfoWrapper and
/// verifies the detected codec, HDR, colour space, stereoscopic layout and
/// chroma sub-sampling of the primary video stream.
/// </summary>
public void Load3dDemo(string fileName, VideoCodec videoCodec, Hdr hdr, ColorSpace colorSpace, StereoMode stereoMode, ChromaSubSampling subSampling)
{
    _mediaInfoWrapper = new MediaInfoWrapper(fileName, _logger);
    // Preconditions: the file loaded, contains video, and is flagged as 3D.
    _mediaInfoWrapper.MediaInfoNotloaded.Should().BeFalse("InfoWrapper should be loaded");
    _mediaInfoWrapper.HasVideo.Should().BeTrue("Video stream must be detected");
    _mediaInfoWrapper.Is3D.Should().BeTrue("Video stream is 3D");
    // Inspect the first (primary) video stream.
    var video = _mediaInfoWrapper.VideoStreams[0];
    video.Hdr.Should().Be(hdr);
    video.Codec.Should().Be(videoCodec);
    video.Stereoscopic.Should().Be(stereoMode);
    video.ColorSpace.Should().Be(colorSpace);
    video.SubSampling.Should().Be(subSampling);
}