// Configures encoder output parameters for file recording: dimensions from the
// camera's output texture, bitrate/framerate from the user's recording settings
// (with computed fallbacks when no settings data is present).
// Returns false when no camera is available to read dimensions from.
protected override bool SetEncoderDefaults(MixCastCamera cam)
{
    if (cam == null)
    {
        return false;
    }

    _uriOutput = fileName;
    _width = cam.Output.width;
    _height = cam.Output.height;

    if (context.Data != null)
    {
        _bitrateKbps = (ulong)context.Data.recordingData.bitrateFileRecording;
        //set the framerate from the Compositing framerate in Camera Settings UI,
        //falling back to the global target framerate when unset (0)
        Framerate = context.Data.outputFramerate == 0
            ? MixCast.Settings.global.targetFramerate
            : context.Data.outputFramerate;
    }
    else
    {
        // No settings data: derive a bitrate from the output resolution.
        _bitrateKbps = (ulong)(BITRATE_DEFAULT_FILE_FACTOR * _width * _height / BITS_IN_KILOBIT);
        // BUGFIX: context.Data.outputFramerate was previously read here
        // unconditionally, throwing a NullReferenceException when Data is null.
        Framerate = MixCast.Settings.global.targetFramerate;
    }

    _gopsize = Framerate * FILERECORD_GOPSIZE_FACTOR;
    return true;
}
// Once MixCast has finished rendering, restore each target's enabled state:
// targets meant only for MixCast output stay disabled, all others come back on.
private void HandleMixCastRenderEnded(MixCastCamera cam)
{
    bool enableAfterRender = !renderForMixCast;
    foreach (var target in targets)
    {
        target.enabled = enableAfterRender;
    }
}
// Starts the base encoder, then begins monitoring free disk space so recording
// problems can be surfaced before the drive fills.
// NOTE(review): the disk space monitor is compiled out under IL2CPP —
// presumably it relies on APIs unavailable in that scripting backend; confirm.
protected override void StartEncoder(MixCastCamera cam)
{
    base.StartEncoder(cam);
#if !ENABLE_IL2CPP
    StartDiskSpaceMonitor();
#endif
}
// Restores target visibility after a MixCast render pass. A camera with a
// non-empty device name composites a physical feed (mixed reality); otherwise
// it is treated as a third-person virtual camera.
private void HandleMixCastRenderEnded(MixCastCamera cam)
{
    bool wasRenderedForMixCast;
    if (string.IsNullOrEmpty(cam.context.Data.deviceName))
    {
        wasRenderedForMixCast = renderForThirdPerson;
    }
    else
    {
        wasRenderedForMixCast = renderForMixedReality;
    }

    bool enableAfterRender = !wasRenderedForMixCast;
    for (int index = 0; index < targets.Count; index++)
    {
        targets[index].enabled = enableAfterRender;
    }
}
// Positions this display object each frame according to the configured
// placement mode: follow the MixCast camera, pin to a world transform, or
// (Headset) leave placement untouched.
protected virtual void Update()
{
    if (context.Data == null)
    {
        return;
    }

    var display = context.Data.displayData;
    if (display.mode == MixCastData.SceneDisplayData.PlacementMode.Camera)
    {
        // Track the MixCast camera's display transform when one exists.
        MixCastCamera followed = MixCastCamera.FindCamera(context);
        if (followed != null)
        {
            transform.position = followed.displayTransform.position;
            transform.rotation = followed.displayTransform.rotation;
        }
    }
    else if (display.mode == MixCastData.SceneDisplayData.PlacementMode.World)
    {
        transform.localPosition = display.position;
        transform.localRotation = display.rotation;
    }
    // PlacementMode.Headset: intentionally no repositioning.
}
// Binds the UI image to the current MixCast camera's output texture, keeping
// the aspect ratio fitter in sync. Clears the image when no camera matches.
private void LateUpdate()
{
    bool cachedCamStale = cam == null
        || cam.context.Data != context.Data
        || !cam.isActiveAndEnabled;
    if (cachedCamStale)
    {
        cam = MixCastCamera.FindCamera(context);
    }

    if (cam == null)
    {
        image.texture = null;
        return;
    }

    image.texture = cam.Output;
    //Comment out to remove memory allocation in editor
    //image.SetMaterialDirty();
    if (image.texture != null && fitter != null)
    {
        fitter.aspectRatio = (float)image.texture.width / image.texture.height;
    }
}
// Records the start time and current output dimensions, then kicks off
// asynchronous encoder initialization — unless an encoder is already running,
// or a previous encoder instance has not finished tearing down yet.
protected virtual void StartEncoder(MixCastCamera cam)
{
    StartEncodingTime = DateTime.Now;
    _encodedFrameCount = 0;
    _lastHeight = cam.Output.height;
    _lastWidth = cam.Output.width;

    if (encoderRunning)
    {
        return;
    }
    // BUGFIX/cleanup: removed a dead "encoderRunning = false;" here — it could
    // only execute when encoderRunning was already false (early return above).

    // If every native encoder handle is still alive, the previous session has
    // not been shut down; refuse to start and surface an error.
    if (_vidEnc != IntPtr.Zero && _vCfgEnc != IntPtr.Zero && _vTxfEnc != IntPtr.Zero &&
        _audEnc != IntPtr.Zero && _aCfgEnc != IntPtr.Zero)
    {
        Debug.Log("Video encoder is not yet ready, it is still initialized from before");
        EventCenter.HandleEvent(Category, EventCenter.Result.Error);
        return;
    }

    StartCoroutine(InitEncoder());
}
// Drives rendering of all in-use MixCast cameras at the global target
// framerate, independent of the game's own framerate.
IEnumerator RenderUsedCameras()
{
    while (isActiveAndEnabled)
    {
        UpdateTransform();
        // Render only when the next scheduled frame time has arrived.
        if (Time.unscaledTime >= nextRenderTime)
        {
            if (OnBeforeRender != null)
            {
                OnBeforeRender();
            }
            for (int i = 0; i < MixCastCamera.ActiveCameras.Count; i++)
            {
                MixCastCamera cam = MixCastCamera.ActiveCameras[i];
                if (cam.IsInUse)
                {
                    cam.RenderScene();
                }
            }
            // Advance by the frame interval rather than resetting to "now" so
            // the long-run average rate stays at the target framerate even
            // when individual frames are late.
            nextRenderTime += 1f / MixCast.Settings.global.targetFramerate;
        }
        yield return(waitForEndOfFrame);
    }
}
// Keeps the UI image bound to the active camera's InputFeed texture, rebinding
// whenever the cached camera/feed pair goes stale. Optionally resizes the
// RectTransform so the image keeps the feed's aspect ratio.
private void LateUpdate()
{
    bool bindingStale = cam == null
        || cam.context.Data != context.Data
        || !cam.isActiveAndEnabled
        || feed == null
        || !feed.isActiveAndEnabled
        || feed.Texture == null;

    if (bindingStale)
    {
        cam = MixCastCamera.ActiveCameras.Find(c => c.context.Data == context.Data);
        feed = cam != null ? cam.GetComponentInChildren<InputFeed>() : null;
    }

    if (feed == null)
    {
        image.texture = null;
        return;
    }

    image.texture = feed.Texture;
    image.SetMaterialDirty();
    if (setScale && image.texture != null)
    {
        float aspectWidth = (float)image.texture.width / image.texture.height * image.rectTransform.rect.height;
        image.rectTransform.SetSizeWithCurrentAnchors(RectTransform.Axis.Horizontal, aspectWidth);
    }
}
// Draws this overlay's Texture into the camera's output RenderTexture at the
// configured position, forcing sRGB writes so colors match the composite.
public void ApplyToFrame(MixCastCamera camera)
{
    if (Texture == null)
    {
        return;
    }

    var renderTex = camera.Output as RenderTexture;
    if (!renderTex)
    {
        Debug.LogError("Attempting to blit texture to null render texture.");
        return;
    }

    Rect rect = CalculateRect(camera, renderTex, fullSize);

    GL.PushMatrix();
    // Pixel coordinates with (0,0) at the top-left of the render target.
    GL.LoadPixelMatrix(0, renderTex.width, renderTex.height, 0);
    Graphics.SetRenderTarget(renderTex);

    bool oldSRGB = GL.sRGBWrite;
    GL.sRGBWrite = true;
    if (Material != null)
    {
        Graphics.DrawTexture(rect, Texture, Material);
    }
    else
    {
        Graphics.DrawTexture(rect, Texture);
    }
    GL.sRGBWrite = oldSRGB;

    // BUGFIX: removed a stray GL.End() that had no matching GL.Begin() —
    // Graphics.DrawTexture does not use immediate mode, and an unpaired
    // GL.End() generates a GL error.
    GL.PopMatrix();
}
// Restores target visibility after a MixCast render pass. A camera with active
// feeds is compositing mixed reality; one without is third-person.
private void HandleMixCastRenderEnded(MixCastCamera cam)
{
    bool wasRenderedForMixCast = cam.ActiveFeeds.Count > 0
        ? renderForMixedReality
        : renderForThirdPerson;

    bool enableAfterRender = !wasRenderedForMixCast;
    for (int index = 0; index < targets.Count; index++)
    {
        targets[index].enabled = enableAfterRender;
    }
}
// Unregisters this context's camera from the set of streaming cameras, if one
// is currently active for the context.
protected override void RemoveCameras()
{
    MixCastCamera cam = MixCastCamera.FindCamera(context);
    if (cam == null)
    {
        return;
    }
    MixCast.StreamingCameras.Remove(cam.context.Data);
}
// Caches the parent MixCastCamera on first enable, runs the base enable logic,
// then immediately applies the current settings.
// Note: the explicit == null check is deliberate — UnityEngine.Object overloads
// operator==, so a null-coalescing assignment would not detect a destroyed
// component.
protected override void OnEnable()
{
    if (cam == null)
    {
        cam = GetComponentInParent <MixCastCamera>();
    }
    base.OnEnable();
    HandleDataChanged();
}
// Stamps branding and (conditionally) the logo watermark onto the camera's
// output frame.
private void ApplyWatermarks(MixCastCamera cam)
{
    if (brandingBlit != null)
    {
        brandingBlit.ApplyToFrame(cam);
    }

    if (logoBlit == null)
    {
        return;
    }
    // The logo is always applied on the free tier; otherwise only while the
    // desktop UI is showing.
    bool logoVisible = MixCast.SecureSettings.IsFreeLicense || MixCast.Desktop.ShowingUI;
    if (logoVisible)
    {
        logoBlit.ApplyToFrame(cam);
    }
}
// Caches the parent MixCastCamera, resets the frame delay queue, then schedules
// the initial settings refresh shortly after enable.
protected override void OnEnable()
{
    if (cam == null)
    {
        cam = GetComponentInParent <MixCastCamera>();
    }
    frames = new FrameDelayQueue <FramePlayerData>();
    base.OnEnable();
    // Deferred by ~10ms so sibling components finish enabling first.
    // NOTE(review): string-based Invoke breaks silently if HandleDataChanged is
    // ever renamed — consider a direct/coroutine call.
    Invoke("HandleDataChanged", 0.01f);
}
// Pushes the camera's current output frame into the native encoder.
// duplicateFrameCount tells the encoder how many frame intervals this image
// covers, keeping output timestamps correct when a frame is repeated.
protected void SendMixCastOutput(MixCastCamera cam, int duplicateFrameCount)
{
    if (!encoderRunning)
    {
        return;
    }
    // Ensure the staging texture matches the encoder's configured dimensions
    // before blitting into it.
    ResizeTexture(_width, _height);
    Graphics.Blit(cam.Output, _cameraOutputTexture);
    MixCastAV.encoderSetDuplicateFrameCount(_vCfgEnc, duplicateFrameCount);
    // Hand the frame to the native plugin on the render thread.
    GL.IssuePluginEvent(MixCastAV.GetEncodeInterfaceRenderCallback(), encodeInterface);
    _encodedFrameCount += duplicateFrameCount;
}
/// <summary>
/// Computes the pixel-space Rect (top-left origin, matching the
/// GL.LoadPixelMatrix setup in ApplyToFrame) at which the watermark is drawn.
/// Historical note: Immediate and Buffered cameras once used different
/// RenderTexture origins (center vs. top-left, the latter shared with
/// Quadrant); after simplification all positions use top-left pixel space.
/// </summary>
/// <returns>Rect for Graphics.DrawTexture.</returns>
private Rect CalculateRect(MixCastCamera camera, RenderTexture renderTex, bool drawFullSize)
{
    Vector2 canvas = new Vector2(renderTex.width, renderTex.height);
    Vector2 mark = GetWatermarkSize(canvas, drawFullSize);
    Vector2 margin = GetOffset(canvas);

    // With a top-left origin, "bottom" rows sit at canvas height minus the
    // watermark height and margin.
    float bottomY = canvas.y - mark.y - margin.y;

    switch (texturePosition)
    {
        case Position.BottomRight:
            return new Rect(canvas.x - mark.x - margin.x, bottomY, mark.x, mark.y);
        case Position.Middle:
            return new Rect(canvas.x * 0.5f - mark.x * 0.5f, canvas.y * 0.5f - mark.y * 0.5f, mark.x, mark.y);
        case Position.BottomLeft:
        default:
            // BottomLeft doubles as the fallback position.
            return new Rect(margin.x, bottomY, mark.x, mark.y);
    }
}
// Binds the UI image to the active camera's output texture, optionally
// resizing the RectTransform to preserve the output's aspect ratio.
private void LateUpdate()
{
    bool cachedCamStale = cam == null
        || cam.context.Data != context.Data
        || !cam.isActiveAndEnabled;
    if (cachedCamStale)
    {
        cam = MixCastCamera.ActiveCameras.Find(c => c.context.Data == context.Data);
    }

    if (cam == null)
    {
        image.texture = null;
        return;
    }

    image.texture = cam.Output;
    image.SetMaterialDirty();
    if (setScale && image.texture != null)
    {
        float aspectWidth = (float)image.texture.width / image.texture.height * image.rectTransform.rect.height;
        image.rectTransform.SetSizeWithCurrentAnchors(RectTransform.Axis.Horizontal, aspectWidth);
    }
}
// Prepares the encoder on enable: warms up the native interop layer, locates
// the camera for this context, and either starts the encode loop or removes
// the camera registration when defaults cannot be applied.
protected override void OnEnable()
{
    base.OnEnable();

    // Force MixCastAV's static constructor to run now. Touching a LibAV entry
    // point for the first time from inside a thread can crash the app — seen
    // after enabling the "Begin recording on MixCast start" option.
    RuntimeHelpers.RunClassConstructor(typeof(MixCastAV).TypeHandle);

    MixCastCamera cam = MixCastCamera.FindCamera(context);
    Assert.IsNotNull(cam);

    if (!SetEncoderDefaults(cam))
    {
        RemoveCameras();
        return;
    }
    StartCoroutine(Run());
}
// Rebuilds the projection quad so it exactly fills the camera's field of view
// at the player's distance, matching the feed's aspect ratio.
void UpdateMesh(InputFeed feed)
{
    MixCastCamera cam = MixCastCamera.FindCamera(feed.context);
    if (cam == null)
    {
        return;
    }

    // Keep the quad slightly in front of the near clip plane.
    float minDist = cam.gameCamera.nearClipPlane * (1f + NEAR_PLANE_PADDING);
    float dist = Mathf.Max(minDist, feed.CalculatePlayerDistance(cam.gameCamera));

    // Half-extents of a quad spanning the device FoV at that distance.
    float halfH = dist * Mathf.Tan(feed.context.Data.deviceFoV * 0.5f * Mathf.Deg2Rad);
    float halfW = halfH * feed.context.Data.deviceFeedWidth / feed.context.Data.deviceFeedHeight;

    vertBuffer[0] = new Vector3(-halfW, halfH, dist);   // top-left
    vertBuffer[1] = new Vector3(halfW, halfH, dist);    // top-right
    vertBuffer[2] = new Vector3(halfW, -halfH, dist);   // bottom-right
    vertBuffer[3] = new Vector3(-halfW, -halfH, dist);  // bottom-left

    projectionMesh.SetVertices(vertBuffer);
    projectionMesh.RecalculateBounds();
    projectionMesh.UploadMeshData(false);
}
// Captures the player's tracking state (head, base, hands, camera distance)
// into the given delayed frame, converting positions into room space when a
// room transform is available.
void FillTrackingData(FrameDelayQueue <FramePlayerData> .Frame <FramePlayerData> frame)
{
    MixCastCamera cam = MixCastCamera.FindCamera(context);
    if (cam != null && cam.gameCamera != null)
    {
        //Scale distance by camera scale
        frame.data.playerDist = cam.gameCamera.transform.TransformVector(Vector3.forward).magnitude *CalculatePlayerDistance(cam.gameCamera);
    }
    frame.data.playerHeadPos = GetTrackingPosition(TrackedDeviceManager.DeviceRole.Head);
    // Base position = head projected onto the floor plane (y = 0).
    frame.data.playerBasePos = new Vector3(frame.data.playerHeadPos.x, 0, frame.data.playerHeadPos.z);
    frame.data.playerLeftHandPos = GetTrackingPosition(TrackedDeviceManager.DeviceRole.LeftHand);
    frame.data.playerRightHandPos = GetTrackingPosition(TrackedDeviceManager.DeviceRole.RightHand);
    // Transform all captured points into room space last, so the base position
    // is derived from the untransformed head position above.
    if (MixCastCameras.Instance.RoomTransform != null)
    {
        Transform roomTransform = MixCastCameras.Instance.RoomTransform;
        frame.data.playerHeadPos = roomTransform.TransformPoint(frame.data.playerHeadPos);
        frame.data.playerBasePos = roomTransform.TransformPoint(frame.data.playerBasePos);
        frame.data.playerLeftHandPos = roomTransform.TransformPoint(frame.data.playerLeftHandPos);
        frame.data.playerRightHandPos = roomTransform.TransformPoint(frame.data.playerRightHandPos);
    }
}
// Computes where the ground sits in the MixCast camera's viewport and feeds
// that vertical position to the feed's blit material so the shader can clip
// the subject at floor level.
private void Update()
{
    if (feed == null || !feed.isActiveAndEnabled)
    {
        return;
    }

    // Hoisted: Camera.main performs a tag lookup; fetch it once per frame.
    Camera mainCam = Camera.main;
    if (mainCam == null)
    {
        // BUGFIX: Camera.main was previously dereferenced unchecked.
        return;
    }

    Vector3 groundOrigin;
    if (groundLayers == 0)
    {
        // No ground layers configured: assume the floor is at y = 0 under the camera.
        groundOrigin = mainCam.transform.position;
        groundOrigin.y = 0;
    }
    else
    {
        RaycastHit hitInfo;
        if (UnityEngine.Physics.Raycast(mainCam.transform.position, Vector3.down, out hitInfo, maxRayLength, groundLayers, QueryTriggerInteraction.Ignore))
        {
            groundOrigin = hitInfo.point;
        }
        else
        {
            // Nothing hit within range: treat the max ray length as the floor.
            groundOrigin = mainCam.transform.position + Vector3.down * maxRayLength;
        }
    }

    if (feed.blitMaterial != null && feed.blitMaterial.HasProperty(groundPositionParameter))
    {
        MixCastCamera cam = MixCastCamera.FindCamera(feed.context);
        // BUGFIX: guard against a missing camera — cam.gameCamera was
        // previously dereferenced without a null check.
        if (cam == null || cam.gameCamera == null)
        {
            return;
        }
        float output = 0;
        // Only meaningful when the ground point is in front of the camera.
        if (cam.gameCamera.transform.InverseTransformPoint(groundOrigin).z > 0)
        {
            output = Mathf.Clamp01(cam.gameCamera.WorldToViewportPoint(groundOrigin).y);
        }
        feed.blitMaterial.SetFloat(groundPositionParameter, output);
    }
}
// Main encode loop: (re)starts the encoder when camera output appears or is
// resized, then paces frame submission to the configured frame duration,
// duplicating frames to cover any time the loop overshoots.
protected IEnumerator Run()
{
    MixCastCamera cam = MixCastCamera.FindCamera(context);
    if (cam == null || cam.Output == null)
    {
        Debug.Log("No MixCast camera found");
    }
    // BUGFIX: only start immediately when a camera with output exists —
    // StartEncoder dereferences cam.Output, so calling it with a null camera
    // threw. The loop below starts the encoder once output becomes available.
    else if (encoderRunning == false)
    {
        StartEncoder(cam);
    }

    while (isActiveAndEnabled)
    {
        // The encoder counts as running only when every native handle is alive.
        bool running = false;
        if (_vidEnc != IntPtr.Zero && _vCfgEnc != IntPtr.Zero && _vTxfEnc != IntPtr.Zero &&
            _audEnc != IntPtr.Zero && _aCfgEnc != IntPtr.Zero)
        {
            running = true;
        }
        if (!running)
        {
            yield return null;
        }

        cam = MixCastCamera.FindCamera(context);
        bool hasOutput = cam != null && cam.Output != null;
        bool resized = hasOutput && (cam.Output.width != _lastWidth || cam.Output.height != _lastHeight);

        // Restart the encoder when the output resized or disappeared/appeared.
        if (resized || (!hasOutput && running))
        {
            StopEncoderAsync(_encoderInitLock);
        }
        if (resized || (hasOutput && !running))
        {
            StartEncoder(cam);
        }

        //Encoding time management
        if (running)
        {
            float startTime = Time.unscaledTime;
            float timeDiff = 0;
            // Wait until a full frame duration has elapsed (including any time
            // carried over from the previous iteration's overshoot).
            while (timeDiff < _frameDuration)
            {
                timeDiff = Time.unscaledTime - startTime + timeOvershot;
                //it has reached the frame duration, break out and encode
                if (timeDiff >= _frameDuration)
                {
                    break;
                }
                yield return null;
            }
            // Carry the fractional remainder into the next frame, and submit
            // duplicate frames for each whole frame duration that elapsed.
            timeOvershot = timeDiff % _frameDuration;
            int frameCount = Math.Max(1, (int)(timeDiff / _frameDuration));
            SendMixCastOutput(cam, frameCount);
        }
    }
}
// Gathers the directional and point lights currently affecting the player and
// writes their directions/positions/colors into lightingData for the
// compositing shader. Lights come either from the manually-registered
// MixCastLight sets or from Light.GetLights, per project settings.
void CalculateCurrentLightsData(FrameLightingData lightingData)
{
    MixCastCamera cam = MixCastCamera.FindCamera(feed.context);
    if (cam == null || cam.gameCamera == null)
    {
        return;
    }
    float playerDist = feed.CalculatePlayerDistance(cam.gameCamera);

    // --- Directional lights (capped at DIR_LIGHT_ARRAY_MAX) ---
    lightingData.directionalLightCount = 0;
    if (MixCast.ProjectSettings.specifyLightsManually)
    {
        foreach (Light light in MixCastLight.ActiveDirectionalLights)
        {
            // Only lights whose culling mask includes this layer and that
            // actually reach the player are packed.
            if ((light.cullingMask & (1 << layerNum)) > 0 && LightIsAffectingPlayer(light, cam.gameCamera, playerDist))
            {
                lightingData.directionalLightDirections[lightingData.directionalLightCount] = light.transform.forward;
                // The 0.5 factor mirrors the power scaling used throughout.
                lightingData.directionalLightColors[lightingData.directionalLightCount] = light.color * light.intensity * MixCast.ProjectSettings.directionalLightPower * 0.5f;
                lightingData.directionalLightCount++;
                if (lightingData.directionalLightCount == DIR_LIGHT_ARRAY_MAX)
                {
                    break;
                }
            }
        }
    }
    else
    {
        var directionalLights = Light.GetLights(LightType.Directional, layerNum);
        for (int i = 0; i < directionalLights.Length && lightingData.directionalLightCount < DIR_LIGHT_ARRAY_MAX; i++)
        {
            if (LightIsAffectingPlayer(directionalLights[i], cam.gameCamera, playerDist))
            {
                lightingData.directionalLightDirections[lightingData.directionalLightCount] = directionalLights[i].transform.forward;
                lightingData.directionalLightColors[lightingData.directionalLightCount] = directionalLights[i].color * directionalLights[i].intensity * MixCast.ProjectSettings.directionalLightPower * 0.5f;
                lightingData.directionalLightCount++;
            }
        }
    }

    // --- Point lights (capped at POINT_LIGHT_ARRAY_MAX) ---
    lightingData.pointLightCount = 0;
    if (MixCast.ProjectSettings.specifyLightsManually)
    {
        foreach (Light light in MixCastLight.ActivePointLights)
        {
            if ((light.cullingMask & (1 << layerNum)) > 0 && LightIsAffectingPlayer(light, cam.gameCamera, playerDist))
            {
                lightingData.pointLightPositions[lightingData.pointLightCount] = light.transform.position;
                // w channel carries the light's range.
                lightingData.pointLightPositions[lightingData.pointLightCount].w = light.range;
                lightingData.pointLightColors[lightingData.pointLightCount] = light.color * light.intensity * MixCast.ProjectSettings.pointLightPower * 0.5f;
                lightingData.pointLightCount++;
                if (lightingData.pointLightCount == POINT_LIGHT_ARRAY_MAX)
                {
                    break;
                }
            }
        }
    }
    else
    {
        var pointLights = Light.GetLights(LightType.Point, layerNum);
        for (int i = 0; i < pointLights.Length && lightingData.pointLightCount < POINT_LIGHT_ARRAY_MAX; i++)
        {
            if (LightIsAffectingPlayer(pointLights[i], cam.gameCamera, playerDist))
            {
                lightingData.pointLightPositions[lightingData.pointLightCount] = pointLights[i].transform.position;
                lightingData.pointLightPositions[lightingData.pointLightCount].w = pointLights[i].range;
                lightingData.pointLightColors[lightingData.pointLightCount] = pointLights[i].color * pointLights[i].intensity * MixCast.ProjectSettings.pointLightPower * 0.5f;
                lightingData.pointLightCount++;
            }
        }
    }

    // Spot light support is currently disabled.
    //foundLights = Light.GetLights(LightType.Spot, layerNum);
    //lightingData.spotLightCount = 0;
    //for (int i = 0; i < foundLights.Length && lightingData.spotLightCount < SPOT_LIGHT_ARRAY_MAX; i++)
    //{
    //    if (LightIsAffectingPlayer(foundLights[i], cam.gameCamera, playerDist))
    //    {
    //        lightingData.spotLightPositions[lightingData.spotLightCount] = foundLights[i].transform.position;
    //        lightingData.spotLightPositions[lightingData.spotLightCount].w = foundLights[i].range;
    //        lightingData.spotLightDirections[lightingData.spotLightCount] = foundLights[i].transform.forward;
    //        lightingData.spotLightDirections[lightingData.spotLightCount].w = foundLights[i].spotAngle * Mathf.Deg2Rad * 0.5f;
    //        lightingData.spotLightColors[lightingData.spotLightCount] = foundLights[i].color * foundLights[i].intensity * MixCast.ProjectSettings.spotLightPower * 0.5f;
    //        lightingData.spotLightCount++;
    //    }
    //}
}
/// <summary>
/// Configures encoder parameters for this output type; overridden by derived
/// classes. Implementations must set _width, _height, _gopsize, _bitrateKbps
/// and Framerate, plus _uriOutput if it hasn't been set yet.
/// </summary>
/// <param name="cam">Camera whose output will be encoded; may be null.</param>
/// <returns>True when defaults were applied and encoding may proceed.</returns>
protected abstract bool SetEncoderDefaults(MixCastCamera cam);
// Restores the target's alpha to the value saved when MixCast rendering began
// (see HandleMixCastRenderStarted).
private void HandleMixCastRenderEnded(MixCastCamera cam)
{
    target.alpha = restoreAlpha;
}
// Saves the target's current alpha, then shows or hides it for the duration of
// the MixCast render depending on whether this camera composites a physical
// device feed (mixed reality) or is a third-person virtual camera.
private void HandleMixCastRenderStarted(MixCastCamera cam)
{
    restoreAlpha = target.alpha;

    bool isMixedReality = !string.IsNullOrEmpty(cam.context.Data.deviceName);
    bool visibleDuringRender = isMixedReality ? renderForMixedReality : renderForThirdPerson;
    target.alpha = visibleDuringRender ? 1 : 0;
}
// Captures the camera's current output to a JPG on disk, spreading the work
// over several frames and serializing concurrent captures through encodeQueue
// so only one instance encodes per frame.
IEnumerator RunAsync()
{
    MixCastCamera cam = MixCastCamera.FindCamera(context);
    if (cam == null)
    {
        ScreenshotError();
        yield break;
    }

    // Snapshot the output immediately so later frames can't change what we save.
    RenderTexture srcTex = RenderTexture.GetTemporary(cam.Output.width, cam.Output.height, 0);
    Graphics.Blit(cam.Output, srcTex);

    //Distribute encoding so only one texture encodes per frame (since not threadable)
    encodeQueue.Add(this);
    yield return new WaitForEndOfFrame();
    while (encodeQueue[0] != this)
    {
        if (encodeQueue[0] == null)
        {
            encodeQueue.RemoveAt(0); //mechanism so 2nd instance still doesn't trigger same frame
        }
        yield return null; // waits for next frame; used to spread out function over multiple frames
    }

    //Reserve file
    string finalFilename = MixCastFiles.GetAvailableFilename(filename);
    yield return null;

    Texture2D tex = new Texture2D(cam.Output.width, cam.Output.height, TextureFormat.RGB24, false, QualitySettings.activeColorSpace == ColorSpace.Linear);
    yield return null;

    // Read the snapshot back to the CPU.
    RenderTexture.active = srcTex;
    tex.ReadPixels(new Rect(0, 0, tex.width, tex.height), 0, 0);
    RenderTexture.active = null;
    yield return null;

    // BUGFIX: textures obtained from RenderTexture.GetTemporary must be
    // returned with RenderTexture.ReleaseTemporary (not Release) so Unity's
    // temporary pool can reclaim and reuse them.
    RenderTexture.ReleaseTemporary(srcTex);
    srcTex = null;
    yield return null;

    JPGEncoder encoder = new JPGEncoder(tex, 100, finalFilename);
    yield return null;
    DestroyImmediate(tex);
    while (!encoder.isDone)
    {
        yield return null;
    }

    encodeQueue[0] = null; //Release encoding lock

    EventCenter.HandleEvent(
        EventCenter.Category.Saving,
        EventCenter.Result.Success,
        string.Format("{0} {1}", Text.Localization.Get("Info_Saved_Screenshot"), finalFilename.Replace("/", "\\")),
        false
    );
}
// Configures encoder parameters for network streaming: validates the output
// URL, resolves the output resolution (forced preset > camera output > 720p
// fallback), and derives bitrate/framerate from settings with sane defaults.
// Returns false when the encoder is still alive or the URL is invalid.
protected override bool SetEncoderDefaults(MixCastCamera cam)
{
    // Refuse to reconfigure while native encoder handles are still alive.
    if (_vidEnc != IntPtr.Zero || _vCfgEnc != IntPtr.Zero || _vTxfEnc != IntPtr.Zero)
    {
        Debug.Log("Initializing encoder: encoder must be shut down first!");
        EventCenter.HandleEvent(Category, EventCenter.Result.Error);
        return false;
    }
    if (!Uri.IsWellFormedUriString(_uriOutput, UriKind.Absolute))
    {
        Debug.LogError("OutputMixCastToNetwork has invalid url: " + _uriOutput);
        EventCenter.HandleEvent(Category, EventCenter.Result.Error, "Warning_Streaming_UrlInvalid");
        return false;
    }

    // Output resolution priority: forced preset > camera output > 720p fallback.
    if (force1080p)
    {
        _width = WIDTH_1080P;
        _height = HEIGHT_1080P;
    }
    else if (force720p)
    {
        _width = WIDTH_720P;
        _height = HEIGHT_720P;
    }
    else if (cam != null && cam.Output != null)
    {
        _width = cam.Output.width;
        _height = cam.Output.height;
    }
    else
    {
        Debug.LogWarning("could not determine correct encoder output dimensions");
        _width = WIDTH_720P;
        _height = HEIGHT_720P;
    }

    if (context.Data != null)
    {
        _bitrateKbps = (ulong)context.Data.recordingData.perCamStreamBitrate;
        //set the framerate from the Compositing framerate in Camera Settings UI
        Framerate = context.Data.outputFramerate == 0
            ? MixCast.Settings.global.targetFramerate
            : context.Data.outputFramerate;
    }
    else
    {
        _bitrateKbps = (ulong)MixCast.Settings.global.defaultStreamBitrate;
        // BUGFIX: context.Data.outputFramerate was previously read even when
        // Data was null, throwing a NullReferenceException.
        Framerate = MixCast.Settings.global.targetFramerate;
    }

    //this is approximatly 1mbps for 1920x1080 video
    if (_bitrateKbps == 0)
    {
        _bitrateKbps = (ulong)(BITRATE_DEFAULT_STREAM_FACTOR * _width * _height / BITS_IN_KILOBIT);
    }

    _gopsize = Framerate * STREAMING_GOPSIZE_FACTOR;
    return true;
}