private void pge_OnUpdate(object sender, FrameUpdateEventArgs frameUpdateArgs)
{
    pge.Clear(Pixel.DARK_BLUE);

    if (runEmulation)
    {
        if (residualTime > 0.0f)
        {
            residualTime -= frameUpdateArgs.ElapsedTime;
        }
        else
        {
            // Accumulate the time budget for one NTSC frame (~60.0988 Hz).
            // Float literals are required here; a double expression would not
            // compile with the float residualTime.
            residualTime += (1.0f / 60.0988f) - frameUpdateArgs.ElapsedTime;
            handleControllerInputs(pge.Platform.KeyboardState);

            // Clock the bus until the PPU signals a completed frame
            do
            {
                nesBus.clock();
                playAudioWhenReady();
            } while (!nesBus.PPU.FrameComplete);
            nesBus.PPU.FrameComplete = false;

            _frameCount++;
            if (DateTime.Now - dtStart >= TimeSpan.FromSeconds(1))
            {
                dtStart = DateTime.Now;
                _fps = _frameCount;
                _frameCount = 0;
            }
        }
    }

    pge.DrawString(360, 2, $"FPS: {_fps}", Pixel.WHITE);

    // Draw rendered output
    pge.DrawSprite(0, 0, nesBus.PPU.GetScreen(), 1);
    pge.DrawString(0, 265, "X, Z - A, B", Pixel.WHITE);
    pge.DrawString(0, 280, "A, S - START, SELECT", Pixel.WHITE);

    // Draw RAM page 0x00
    //DrawRam(2, 2, 0x0000, 16, 16);
    //DrawRam(2, 182, 0x0100, 16, 16);
    //DrawCpu(416, 2);
    //DrawCode(416, 72, 26);
    //DrawOam(270, 10, 0, 8, true);
    //DrawOam(270, 140, 0, 32);
    //DrawOam(500, 140, 32, 32);

    // Draw palettes & pattern tables
    //DrawPalettes(516, 340);

    // Draw selection reticle around selected palette
    //pge.DrawRect(516 + selectedPalette * (swatchSize * 5) - 1, 339, (swatchSize * 4), swatchSize, Pixel.WHITE);

    // Generate pattern tables
    //pge.DrawSprite(316, 10, nesBus.PPU.GetPatternTable(0, (byte)selectedPalette));
    //pge.DrawSprite(448, 10, nesBus.PPU.GetPatternTable(1, (byte)selectedPalette));
}
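// The handler above uses a residual-time accumulator so emulation steps at the
// NES's ~60.0988 Hz regardless of the host frame rate. A minimal sketch of that
// pattern in isolation (TargetFrameTime, TickFixedStep and StepSimulation are
// illustrative names, not part of the original code):
private const float TargetFrameTime = 1.0f / 60.0988f; // one NTSC NES frame
private float _residual;

private void TickFixedStep(float elapsedSeconds)
{
    if (_residual > 0.0f)
    {
        // Still ahead of the target rate: burn off the surplus and skip a step.
        _residual -= elapsedSeconds;
    }
    else
    {
        // On or behind schedule: bank the deficit and run one simulation step.
        _residual += TargetFrameTime - elapsedSeconds;
        StepSimulation();
    }
}

private void StepSimulation()
{
    // One whole emulated frame would be produced here.
}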
private void Call_CallEvent(object sender, CallEventArgs args)
{
    if (args.Type == CallEventType.ConfigurationComplete)
    {
        //STEP3: configuration completed -> try calling
        Call();
    }
    else if (args.Type == CallEventType.ConfigurationFailed)
    {
        Error("Accessing audio / video failed");
    }
    else if (args.Type == CallEventType.ConnectionFailed)
    {
        Error("ConnectionFailed");
    }
    else if (args.Type == CallEventType.ListeningFailed)
    {
        Error("ListeningFailed");
    }
    else if (args.Type == CallEventType.CallAccepted)
    {
        //STEP5: we are connected
        mState = SimpleCallState.InCall;
        Log("Connection established");
    }
    else if (args.Type == CallEventType.CallEnded)
    {
        mState = SimpleCallState.Ended;
        Log("Call ended.");
    }
    else if (args.Type == CallEventType.FrameUpdate)
    {
        //STEP6: until the end of the call we receive frames here.
        //Note that this is triggered for local frames right after Configure,
        //even before a connection is established! It fires once per video
        //frame for both local and remote video images.
        FrameUpdateEventArgs frameArgs = args as FrameUpdateEventArgs;
        if (frameArgs.ConnectionId == ConnectionId.INVALID)
        {
            //an invalid connection id means this is a local frame
            bool textureCreated = UnityMediaHelper.UpdateRawImage(_LocalImage, frameArgs.Frame);
            if (textureCreated)
            {
                Texture2D tex = _LocalImage.texture as Texture2D;
                Log("Local Texture(s) created " + tex.width + "x" + tex.height
                    + " format: " + frameArgs.Frame.Format);
            }
        }
        else
        {
            //remote frame
            bool textureCreated = UnityMediaHelper.UpdateRawImage(_RemoteImage, frameArgs.Frame);
            if (textureCreated)
            {
                Texture2D tex = _RemoteImage.texture as Texture2D;
                Log("Remote Texture(s) created " + tex.width + "x" + tex.height
                    + " format: " + frameArgs.Frame.Format);
            }
        }
    }
}
private void UpdateFrame(FrameUpdateEventArgs frameUpdateEventArgs)
{
    //route the frame to the matching video view and backing texture
    if (frameUpdateEventArgs.IsRemote)
    {
        UpdateTexture(remoteVideo, remoteVideoFlipped, ref remoteVideoTexture,
            frameUpdateEventArgs.Frame, frameUpdateEventArgs.Format);
    }
    else
    {
        UpdateTexture(localVideo, localVideoFlipped, ref localVideoTexture,
            frameUpdateEventArgs.Frame, frameUpdateEventArgs.Format);
    }
}
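// UpdateTexture above is a helper on the surrounding class whose body isn't
// shown. A minimal sketch of what such a helper commonly does with a raw RGBA
// frame in Unity, assuming the frame's pixel buffer, width and height have
// already been extracted (the signature and names are illustrative, not the
// original helper; assumes the UnityEngine and UnityEngine.UI namespaces):
static void UploadRgbaFrame(RawImage target, ref Texture2D texture,
    byte[] rgba, int width, int height)
{
    // (Re)create the texture on the first frame or when the resolution changes.
    if (texture == null || texture.width != width || texture.height != height)
    {
        texture = new Texture2D(width, height, TextureFormat.RGBA32, false);
        target.texture = texture;
    }

    // Copy the raw pixels into the texture and upload them to the GPU.
    texture.LoadRawTextureData(rgba);
    texture.Apply();
}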
/// <summary>
/// Handler of call events.
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void Call_CallEvent(object sender, CallEventArgs e)
{
    switch (e.Type)
    {
        case CallEventType.CallAccepted:
            //Outgoing call was successful or an incoming call arrived
            Append("Connection established");
            OnNewCall(e as CallAcceptedEventArgs);
            break;
        case CallEventType.CallEnded:
            OnCallEnded(e as CallEndedEventArgs);
            break;
        case CallEventType.ListeningFailed:
            Append("Failed to listen for incoming calls! Server might be down!");
            ResetCall();
            break;
        case CallEventType.ConnectionFailed:
            {
                //this shouldn't be possible in conference mode
                Byn.Media.ErrorEventArgs args = e as Byn.Media.ErrorEventArgs;
                Append("Error: " + args.ErrorMessage);
                Debug.LogError(args.ErrorMessage);
                ResetCall();
            }
            break;
        case CallEventType.FrameUpdate:
            //new frame received from webrtc (either from the local camera or the network)
            FrameUpdateEventArgs frameargs = e as FrameUpdateEventArgs;
            UpdateFrame(frameargs.ConnectionId, frameargs.Frame);
            break;
        case CallEventType.Message:
            {
                //text message received
                MessageEventArgs args = e as MessageEventArgs;
                Append(args.Content);
                break;
            }
        case CallEventType.WaitForIncomingCall:
            {
                //the chat app will wait for another app to connect via the same address string
                WaitForIncomingCallEventArgs args = e as WaitForIncomingCallEventArgs;
                Append("Waiting for incoming call address: " + args.Address);
                break;
            }
    }
}
protected override void UpdateFrame(FrameUpdateEventArgs frameUpdateEventArgs)
{
    //base.UpdateFrame(frameUpdateEventArgs);
    Debug.Log("UpdateFrame received a frame");
    if (VideoTest.instance) // && role == IOType.Receiver
    {
        // Guarded by the instance check above to avoid a NullReferenceException
        // when the VideoTest singleton has not been set up yet.
        VideoTest.instance.DebugCall(3);
        VideoTest.instance.SetFrame(frameUpdateEventArgs.Frame, frameUpdateEventArgs.Format);
    }
}
protected virtual void UpdateFrame(FrameUpdateEventArgs frameUpdateEventArgs)
{
    //To avoid wasting CPU time, the library keeps the format returned by the
    //browser (ABGR, little endian), so the bytes arrive in the order R G B A.
    //Unity seems to use this byte order but flips the image vertically
    //(reading the last row first?); this is reversed on the UI side, again to
    //avoid wasting CPU time.
    //Debug.Log("frame update remote: " + frameUpdateEventArgs.IsRemote);
    if (frameUpdateEventArgs.IsRemote)
    {
        mUi.UpdateRemoteTexture(frameUpdateEventArgs.Frame, frameUpdateEventArgs.Format);
    }
}
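// One cheap way to do the UI-side flip mentioned above is to mirror the
// RawImage's UV rect so the GPU samples the texture bottom-up; no pixel data is
// touched on the CPU. A minimal sketch (the method name is illustrative;
// RawImage.uvRect itself is standard UnityEngine.UI):
void FlipVertically(UnityEngine.UI.RawImage image)
{
    // Start at v = 1 and sample downwards (height -1) to invert the row order.
    image.uvRect = new UnityEngine.Rect(0f, 1f, 1f, -1f);
}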
public override void Update_Active(object sender, FrameUpdateEventArgs e)
{
    // Update the jump damper gauge from the player's jump animation
    if (Gfx != null)
    {
        CarObject player = Gfx.FindObject("Player") as CarObject;
        if (player != null)
        {
            AnimKeyJump aniJump = player.GetAnimation("Jump") as AnimKeyJump;
            if (aniJump != null)
            {
                // Show the remaining jump reservoir as a percentage
                pgJumpRes.Value = aniJump.JumpReservoir / aniJump.JumpReservoirMax * 100;
            }
        }
    }
}
private void updateFrame(object sender, FrameUpdateEventArgs frameUpdateArgs)
{
    pge.Clear(Pixel.BLUE);
    pge.PixelBlendMode = csPixelGameEngineCore.Enums.BlendMode.MASK;

    //testAnimation[1].CopyTo(pge.DefaultDrawTarget, 0, 0, -100, -100);
    //pge.DrawSprite(0, 0, testAnimation[1]);
    //pge.DrawDecal(new vec2d_f(), testAnimationDecal[1]);

    // Advance the rotation and wrap it at a full circle. The original
    // "_rotation += _rotationStep % _fullCircle;" applied the modulo to the
    // step instead of the sum, so the angle grew without bound.
    _rotation = (_rotation + _rotationStep) % _fullCircle;
    pge.DrawRotatedDecal(
        new vec2d_f(testAnimationDecal[1].sprite.Width / 2.0f, testAnimationDecal[1].sprite.Height / 2.0f),
        testAnimationDecal[1],
        _rotation,
        new vec2d_f(testAnimationDecal[1].sprite.Width / 2.0f, testAnimationDecal[1].sprite.Height / 2.0f));
    pge.DrawWarpedDecal(testAnimationDecal[1], new vec2d_f[]
    {
        new vec2d_f(400.0f, 200.0f),
        new vec2d_f(780.0f, 550.0f),
        new vec2d_f(10.0f, 500.0f),
        new vec2d_f(200.0f, 120.0f)
    });

    showCursorPos(0, 20);
    showMouseWheelDelta(0, 30);
    showMouseButtonState(0, 40, 0);
    showMouseButtonState(0, 50, 1);
    showMouseButtonState(0, 60, 2);

    //pge.PixelBlendMode = csPixelGameEngineCore.Enums.BlendMode.NORMAL;
    //pge.DrawCircle(100, 100, 100, Pixel.RED);
    //pge.FillCircle(500, 500, 30, Pixel.GREEN);
    //pge.PixelBlendMode = csPixelGameEngineCore.Enums.BlendMode.ALPHA;
    //pge.FillTriangle(new vec2d_i(304, 200),
    //                 new vec2d_i(544, 381),
    //                 new vec2d_i(444, 500),
    //                 Pixel.MAGENTA);
    //pge.PixelBlendMode = csPixelGameEngineCore.Enums.BlendMode.NORMAL;
    //drawRandomPixels();

    // Simple once-per-second FPS counter
    _curFrameCount++;
    if ((DateTime.Now - _dtStartFrame) >= TimeSpan.FromSeconds(1))
    {
        _fps = _curFrameCount;
        _curFrameCount = 0;
        _dtStartFrame = DateTime.Now;
    }
    pge.DrawStringDecal(0, 10, $"FPS: {_fps}", Pixel.BLACK);
}
private void Call_CallEvent(object sender, CallEventArgs args)
{
    if (args.Type == CallEventType.ConfigurationComplete)
    {
        //STEP3: configuration completed -> try calling
        Call();
    }
    else if (args.Type == CallEventType.ConfigurationFailed)
    {
        Error("Accessing audio / video failed");
    }
    else if (args.Type == CallEventType.ConnectionFailed)
    {
        Error("ConnectionFailed");
    }
    else if (args.Type == CallEventType.ListeningFailed)
    {
        Error("ListeningFailed");
    }
    else if (args.Type == CallEventType.CallAccepted)
    {
        //STEP5: we are connected
        mState = SimpleCallState.InCall;
        Log("Connection established");
    }
    else if (args.Type == CallEventType.CallEnded)
    {
        mState = SimpleCallState.Ended;
        Log("Call ended.");
    }
    else if (args.Type == CallEventType.FrameUpdate)
    {
        //STEP6: until the end of the call we receive frames here.
        //Note that this is triggered for local frames right after Configure,
        //even before a connection is established! It fires once per video
        //frame for both local and remote video images.
        FrameUpdateEventArgs frameArgs = args as FrameUpdateEventArgs;
        if (frameArgs.ConnectionId == ConnectionId.INVALID)
        {
            //an invalid connection id means this is a local frame:
            //copy the raw pixels into a unity texture
            bool textureCreated = UnityMediaHelper.UpdateTexture(ref mLocalVideo, frameArgs.Frame, frameArgs.Format);
            if (textureCreated)
            {
                if (_LocalImage != null)
                {
                    _LocalImage.texture = mLocalVideo;
                }
                Log("Local Texture created " + frameArgs.Frame.Width + "x" + frameArgs.Frame.Height
                    + " format: " + frameArgs.Format);
            }
        }
        else
        {
            //remote frame. In conference calls we would get multiple remote frames with different ids
            bool textureCreated = UnityMediaHelper.UpdateTexture(ref mRemoteVideo, frameArgs.Frame, frameArgs.Format);
            if (textureCreated)
            {
                if (_RemoteImage != null)
                {
                    _RemoteImage.texture = mRemoteVideo;
                }
                Log("Remote Texture created " + frameArgs.Frame.Width + "x" + frameArgs.Frame.Height
                    + " format: " + frameArgs.Format);
            }
        }
    }
}