private IVideoRender GetVideoRender()
{
    var test = Screen.AllScreens; // unused; left over from debugging screen enumeration
    IVideoRender render = new VideoRender();
    return render;
}
private static void InnerShow(string absolutePath, int _displayMonitor)
{
    ForeachVideoRenders((_videoRender, screen, index) =>
    {
        if (_displayMonitor == index || _displayMonitor < 0)
        {
            if (_videoRender == null || _videoRender.RenderDisposed)
            {
                Execute.OnUIThread(() =>
                {
                    _videoRender = new VideoRender();
                    _videoRender.Init(screen);
                    InnerApplyVideoAspect(_videoRender, VideoAspect, screen);
                    bool ok = LiveWallpaperEngineManager.Show(_videoRender, screen);
                    if (!ok)
                    {
                        LiveWallpaperEngineManager.Close(_videoRender);
                        // "LiveWallpaper does not seem to be working properly; please close your antivirus software and try again."
                        System.Windows.MessageBox.Show("巨应壁纸貌似不能正常工作,请关闭杀软重试");
                    }
                    else
                    {
                        _videoRenders[index] = _videoRender;
                    }
                });
            }
        }

        _videoRender?.Play(absolutePath);
        if (index == _audioSourceMonitor)
        {
            _videoRender?.Mute(false);
        }
    });
}
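// Hypothetical usage of InnerShow above: the second argument selects the target monitor, and any
// negative value means "all monitors" (see the `_displayMonitor == index || _displayMonitor < 0` check).
// The file path and indexes below are illustrative only.
private static void ShowExamples()
{
    InnerShow(@"C:\Wallpapers\ocean.mp4", -1); // play on every monitor
    InnerShow(@"C:\Wallpapers\ocean.mp4", 1);  // play only on the monitor with index 1
}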
private void Session_Disconnected(object sender, System.EventArgs e)
{
    Debug.Log("Session Disconnected");
    Connected = false;

    if (subscriber != null)
    {
        subscriber.Dispose();
        subscriber = null;
        subscriberRender.Dispose();
        subscriberRender = null;
        subscriberRenderer.Enabled = false;
    }

    if (publisher != null)
    {
        publisher.Dispose();
        publisher = null;
        publisherRender.Dispose();
        publisherRender = null;
        publisherRenderer.Enabled = false;
    }

    Context.Instance.Dispose();
    Debug.Log("Object disposed");
}
public void Start(ulong senderId, int frameWidth, int frameHeight)
{
    Stop();
    _senderId = (uint)senderId;

    IRtcEngine engine = IRtcEngine.QueryEngine();
    if (engine != null)
    {
        _videoRender = (VideoRender)engine.GetVideoRender();
        _videoRender.SetVideoRenderMode(VIDEO_RENDER_MODE.RENDER_RAWDATA);
        _videoRender.AddUserVideoInfo(_senderId, 0);
    }

    _nativeTexture = new Texture2D(frameWidth, frameHeight, TextureFormat.RGBA32, false);
    OnUpdateTexture?.Invoke(_nativeTexture);

    _customTextureRenderer = new PluginTextureRenderer(
        UpdateRawTextureDataFunction,
        targetTexture: _nativeTexture,
        autoDispose: false);
    CustomTextureRenderSystem.Instance.AddRenderer(_customTextureRenderer);
}
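// A hedged sketch of the Stop() teardown that Start() calls first. It is not shown in this snippet;
// RemoveUserVideoInfo is assumed to mirror the AddUserVideoInfo call above, and RemoveRenderer is a
// hypothetical counterpart to CustomTextureRenderSystem.Instance.AddRenderer that may be named
// differently in this project.
public void Stop()
{
    if (_customTextureRenderer != null)
    {
        CustomTextureRenderSystem.Instance.RemoveRenderer(_customTextureRenderer); // hypothetical API
        _customTextureRenderer = null;
    }

    if (_videoRender != null)
    {
        _videoRender.RemoveUserVideoInfo(_senderId); // undo the registration made in Start()
        _videoRender = null;
    }

    if (_nativeTexture != null)
    {
        UnityEngine.Object.Destroy(_nativeTexture);
        _nativeTexture = null;
    }
}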
/// <summary>
/// Raised before video capture starts.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void VC_VideoCapturerBefore(object sender, VideoCapturedEventArgs e)
{
    Console.WriteLine("bmiColors:" + e.BITMAPINFO.bmiColors.ToString());
    Console.WriteLine(
        "biSize:" + e.BITMAPINFO.bmiHeader.biSize.ToString() +
        " biSizeImage:" + e.BITMAPINFO.bmiHeader.biSizeImage.ToString() +
        " biBitCount:" + e.BITMAPINFO.bmiHeader.biBitCount.ToString() +
        " biWidth:" + e.BITMAPINFO.bmiHeader.biWidth.ToString() +
        " biHeight:" + e.BITMAPINFO.bmiHeader.biHeight.ToString() +
        " biClrUsed:" + e.BITMAPINFO.bmiHeader.biClrUsed.ToString() +
        " biClrImportant:" + e.BITMAPINFO.bmiHeader.biClrImportant.ToString() +
        " biXPelsPerMeter:" + e.BITMAPINFO.bmiHeader.biXPelsPerMeter.ToString() +
        " biYPelsPerMeter:" + e.BITMAPINFO.bmiHeader.biYPelsPerMeter.ToString() +
        " biPlanes:" + e.BITMAPINFO.bmiHeader.biPlanes.ToString() +
        " biCompression:" + e.BITMAPINFO.bmiHeader.biCompression.ToString());

    if (VE == null)
    {
        VE = new VideoEncoder(e.BITMAPINFO, true); // Create a new video encoder matching the camera's capture format
    }
    if (VR == null)
    {
        VR = new VideoRender(this.cRemote);
        VR.IniVideoRender(e.BITMAPINFO.bmiHeader);
    }
    if (VD == null)
    {
        VD = new VideoEncoder(e.BITMAPINFO, false); // Initialize the decoder
    }

    // To guard against packet loss, send the local video header info to the remote peer three times
    // so that its decoder can decode the stream correctly.
    // AVcommunication1.SendBITMAPINFOHEADER(e.BITMAPINFO.bmiHeader);
    // System.Threading.Thread.Sleep(300);
    // AVcommunication1.SendBITMAPINFOHEADER(e.BITMAPINFO.bmiHeader);
    // System.Threading.Thread.Sleep(300);
    // AVcommunication1.SendBITMAPINFOHEADER(e.BITMAPINFO.bmiHeader);
}
private static void InnerApplyVideoAspect(VideoRender videoRender, string videoAspect, System.Windows.Forms.Screen screen)
{
    if (string.IsNullOrEmpty(videoAspect))
    {
        videoRender?.SetAspect($"{screen.Bounds.Width}:{screen.Bounds.Height}");
    }
    else
    {
        videoRender?.SetAspect(videoAspect);
    }
}
/// <summary>
/// Sets the remote peer's video format.
/// </summary>
/// <param name="BITMAPINFO"></param>
public void SetRemoteBITMAPINFOHEADER(BITMAPINFO BITMAPINFO)
{
    if (VD == null)
    {
        VD = new VideoEncoder(BITMAPINFO, false); // Create the video decoder
    }
    if (VR == null)
    {
        VR = new VideoRender(this.cRemote); // Create the video playback component
        VR.IniVideoRender(BITMAPINFO.bmiHeader);
    }
}
private void Session_Connected(object sender, System.EventArgs e)
{
    Debug.Log("Session Connected");
    Connected = true;

    Debug.Log("Creating Publisher");
    publisherRender = new VideoRender(publisherRenderer.rendererId);
    publisher = new Publisher(Context.Instance, renderer: publisherRender);
    publisher.StreamCreated += Publisher_StreamCreated;
    session.Publish(publisher);
    publisherRenderer.Enabled = true;
}
/// <summary>
/// Close and release resources.
/// </summary>
public void Close()
{
    if (frameTransmit != null)
    {
        frameTransmit.Dispose();
        frameTransmit = null;
    }
    if (VC != null)
    {
        VC.Close();
        VC = null;
    }
    if (VE != null)
    {
        VE.Close();
        VE = null;
    }
    if (VD != null)
    {
        VD.Close();
        VD = null;
    }
    if (VR != null)
    {
        VR = null;
    }
    if (AC != null)
    {
        AC.Close();
        AC = null;
    }
    if (AE != null)
    {
        AE.Close();
        AE = null;
    }
    if (AR != null)
    {
        AR.Close();
        AR = null;
    }
    if (cam != null)
    {
        cam.Stop();
        cam.Dispose();
        cam = null;
        timer1.Stop();
    }

    //cLocal.Dispose();
    cLocal = null;
    //cRemote.Dispose();
    cRemote = null;
    //trackBarIn.Dispose();
    trackBarIn = null;
    //trackBarOut.Dispose();
    trackBarOut = null;
}
private void Session_StreamReceived(object sender, Session.StreamEventArgs e)
{
    if (subscriber != null)
    {
        return; // This sample can only handle one subscriber
    }

    Debug.LogFormat("Stream received {0}", e.Stream.Id);
    subscriberRender = new VideoRender(subscriberRenderer.rendererId);
    subscriber = new Subscriber(Context.Instance, e.Stream, subscriberRender);
    session.Subscribe(subscriber);
    subscriberRenderer.Enabled = true;
}
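// A minimal sketch (not part of the sample above) of how these Session_* handlers are attached,
// assuming `session` was already constructed elsewhere from the OpenTok Context, API key and
// session id; the event names follow the handler naming used above, and TOKEN is a placeholder
// for the client token issued by your server-side code.
private void ConnectSession()
{
    session.Connected += Session_Connected;
    session.Disconnected += Session_Disconnected;
    session.StreamReceived += Session_StreamReceived;
    session.Connect(TOKEN); // TOKEN: placeholder
}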
//Begin rendering a video to the draw stack
public VideoPlayer PlayVideo(string videoToLoad, string key, Sprite spriteObject)
{
    //Load the passed in video and trigger it to play
    video = Content.Load<Video>(videoToLoad);

    //Create a new video render object and have it begin playing
    VideoRender videoRender = new VideoRender()
    {
        Render = spriteObject,
        Player = player
    };
    videoRender.Player.Play(video);

    //Add the video object to the draw stack
    videoRender.Render.Texture = videoRender.Player.GetTexture();
    videoDrawStack.Add(key, videoRender);

    //Return the video player object
    return videoRender.Player;
}
private void RenderPreview(MediaPlayer media)
{
    int textureFrameCount = media.TextureProducer.GetTextureFrameCount();
    if (textureFrameCount != _previewTextureFrameCount)
    {
        _previewTextureFrameCount = textureFrameCount;

        if (!_materialResolve)
        {
            _materialResolve = VideoRender.CreateResolveMaterial();
        }
        if (!_materialIMGUI)
        {
            _materialIMGUI = VideoRender.CreateIMGUIMaterial();
        }

        VideoRender.SetupMaterialForMedia(_materialResolve, media, -1);

        VideoRender.ResolveFlags resolveFlags = (VideoRender.ResolveFlags.ColorspaceSRGB |
                                                 VideoRender.ResolveFlags.Mipmaps |
                                                 VideoRender.ResolveFlags.PackedAlpha |
                                                 VideoRender.ResolveFlags.StereoLeft);
        _previewTexture = VideoRender.ResolveVideoToRenderTexture(_materialResolve, _previewTexture, media.TextureProducer, resolveFlags);
    }
}
private void btnVideo_Click(object sender, RoutedEventArgs e)
{
    LiveWallpaperEngineManager.UIDispatcher = Dispatcher;
    using (var openFileDialog = new System.Windows.Forms.OpenFileDialog())
    {
        openFileDialog.Filter = "All files (*.*)|*.*";
        if (openFileDialog.ShowDialog() == System.Windows.Forms.DialogResult.OK)
        {
            ForeachVideoRenders((renderItem, screen) =>
            {
                bool returnNew = false;
                if (renderItem == null || renderItem.RenderDisposed)
                {
                    returnNew = true;
                    renderItem = new VideoRender();
                    renderItem.Init(screen);
                    bool ok = LiveWallpaperEngineManager.Show(renderItem, screen);
                    if (!ok)
                    {
                        renderItem.CloseRender();
                        MessageBox.Show(ok.ToString());
                    }
                }

                string filePath = openFileDialog.FileName;
                renderItem.Play(filePath);

                if (returnNew)
                {
                    return renderItem;
                }
                return null;
            });
        }
    }
}
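// A hypothetical sketch of the Func overload of ForeachVideoRenders that btnVideo_Click relies on:
// the callback may return a newly created VideoRender (to be stored for that screen) or null to keep
// the existing one. The _videoRenders field is assumed to be the per-screen array also used by
// InnerShow above; all names here are assumptions based on the call sites.
private void ForeachVideoRenders(Func<VideoRender, System.Windows.Forms.Screen, VideoRender> func)
{
    var screens = System.Windows.Forms.Screen.AllScreens;
    for (int i = 0; i < screens.Length; i++)
    {
        VideoRender created = func(_videoRenders[i], screens[i]);
        if (created != null)
        {
            _videoRenders[i] = created; // remember the render the callback created for this screen
        }
    }
}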
private void UpdateGenerateThumbnail()
{
    if (Time.renderedFrameCount == _lastFrame)
    {
        // In at least Unity 5.6 we have to force refresh of the UI otherwise the render thread doesn't run to update the textures
        this.Repaint();
        UnityEditorInternal.InternalEditorUtility.RepaintAllViews();
        return;
    }

    // Wait for a frame to be rendered
    Debug.Assert(_thumbnailPlayer != null);
    if (_thumbnailPlayer != null)
    {
        _timeoutTimer += Time.unscaledDeltaTime;
        bool nextVideo = false;

        _thumbnailPlayer.Update();
        _thumbnailPlayer.Render();

        if (_mediaFrame < 0 && _thumbnailPlayer.CanPlay())
        {
            _thumbnailPlayer.MuteAudio(true);
            _thumbnailPlayer.Play();
            _thumbnailPlayer.Seek(_thumbnailPlayer.GetDuration() * _thumbnailTime);
            _mediaFrame = _thumbnailPlayer.GetTextureFrameCount();
        }

        if (_thumbnailPlayer.GetTexture() != null)
        {
            if (_mediaFrame != _thumbnailPlayer.GetTextureFrameCount() && _thumbnailPlayer.GetTextureFrameCount() > 3)
            {
                bool prevSRGB = GL.sRGBWrite;
                GL.sRGBWrite = false;
                RenderTexture rt2 = null;

                // TODO: move this all into VideoRender as a resolve method
                {
                    Material materialResolve = new Material(Shader.Find(VideoRender.Shader_Resolve));
                    VideoRender.SetupVerticalFlipMaterial(materialResolve, _thumbnailPlayer.RequiresVerticalFlip());
                    VideoRender.SetupAlphaPackedMaterial(materialResolve, _thumbnailPlayer.GetTextureAlphaPacking());
                    VideoRender.SetupGammaMaterial(materialResolve, !_thumbnailPlayer.PlayerSupportsLinearColorSpace());

                    RenderTexture prev = RenderTexture.active;

                    // Scale to fit and downsample
                    rt2 = RenderTexture.GetTemporary(128, 128, 0, RenderTextureFormat.ARGB32, RenderTextureReadWrite.sRGB);
                    RenderTexture.active = rt2;
                    GL.Clear(false, true, new Color(0f, 0f, 0f, 0f));

                    ScaleMode scaleMode = ScaleMode.ScaleToFit;
                    if (_zoomToFill)
                    {
                        scaleMode = ScaleMode.ScaleAndCrop;
                    }
                    VideoRender.DrawTexture(new Rect(0f, 0f, 128f, 128f), _thumbnailPlayer.GetTexture(), scaleMode, _thumbnailPlayer.GetTextureAlphaPacking(), materialResolve);

                    RenderTexture.active = prev;
                    Material.DestroyImmediate(materialResolve);
                    materialResolve = null;
                }

                Texture2D readTexture = new Texture2D(128, 128, TextureFormat.RGBA32, true, false);
                Helper.GetReadableTexture(rt2, readTexture);

                MediaReference mediaRef = (this.targets[_targetIndex]) as MediaReference;
                mediaRef.GeneratePreview(readTexture);

                DestroyImmediate(readTexture);
                readTexture = null;
                RenderTexture.ReleaseTemporary(rt2);
                GL.sRGBWrite = prevSRGB;

                nextVideo = true;
                Debug.Log("Thumbnail Written");
            }
        }

        if (!nextVideo)
        {
            // If there is an error or it times out, then skip this media
            if (_timeoutTimer > 10f || _thumbnailPlayer.GetLastError() != ErrorCode.None)
            {
                MediaReference mediaRef = (this.targets[_targetIndex]) as MediaReference;
                mediaRef.GeneratePreview(null);
                nextVideo = true;
            }
        }

        if (nextVideo)
        {
            BeginNextThumbnail(_targetIndex + 1);
        }
    }

    _lastFrame = Time.renderedFrameCount;
}
/// <summary>
/// Retrieve the next video frame to render
/// </summary>
/// <param name="itemToUpdate"></param>
protected void UpdateVideo(VideoRender itemToUpdate)
{
    //Update item to have the next frame to be drawn
    itemToUpdate.Render.Texture = itemToUpdate.Player.GetTexture();
}
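// A hedged sketch of how UpdateVideo might be driven each frame, assuming the containing class
// derives from Microsoft.Xna.Framework.Game and videoDrawStack is the Dictionary<string, VideoRender>
// populated by PlayVideo above; this loop is illustrative, not the project's actual Update method.
protected override void Update(GameTime gameTime)
{
    foreach (VideoRender videoRender in videoDrawStack.Values)
    {
        UpdateVideo(videoRender); // refresh the sprite texture with the latest decoded frame
    }
    base.Update(gameTime);
}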