private void CloseCurrentApp()
{
    // Tear down the currently hosted FUSEE app and its WinForms plumbing,
    // in dependency order: app first, then the render control, then the host.
    richTextBox1.Text = Resources.MainWindow_CloseCurrentApp_Loading___;
    if (_currentApp != null)
    {
        _currentApp.DeInit();
    }
    // Clean up
    _currentApp = null;
    if (_currentControl != null)
    {
        // Unhook the handler wired up at creation time so the control can be collected.
        _currentControl.HandleCreated -= renderControl_HandleCreated;
        _currentControl.Dispose();
        splitContainer1.Panel2.Controls.Remove(_currentControl);
        _currentControl = null;
    }
    if (_currentHost != null)
    {
        _currentHost.Dispose();
        _currentHost = null;
    }
    // Just in case...
    // NOTE(review): a forced GC is usually unnecessary; presumably here to
    // release native GL/window resources promptly before loading the next app — confirm.
    GC.Collect();
    GC.WaitForPendingFinalizers();
}
public KinectTester()
{
    // Wire up the Kinect pipeline: skeleton functions (joints, gestures,
    // renderer) are registered on the sensor, and the renderer feeds the
    // replay canvas which raises ImageRendered for display.
    InitializeComponent();
    sensor = new SmartKinectSensor();
    renderCanvas = new RenderCanvas(TimeSpan.FromSeconds(10)); // 10 s replay buffer
    sensor.EnableSkeletonRenderingSensor();
    renderCanvas.ImageRendered += DisplayRenderedImage;
    capturedLabel.Visible = false;
    jointController = new JointController();
    gestureController = new GestureController();
    skeletonRender = new SkeletonRenderer(sensor.Sensor);
    sensor.SkeletonController.AddFunction(jointController);
    sensor.SkeletonController.AddFunction(gestureController);
    sensor.SkeletonController.AddFunction(skeletonRender);
    skeletonRender.SkeletonRendered += renderCanvas.SkeletonFrameCaptured;
    gestureController.GestureCaptured += GestureCaptured;
    jointController.JointTrackingCaptured += JointTrackingCaptured;
    // NOTE(review): port name and baud rate are hard-coded, and the opened
    // serial port is only referenced via bodyTracker — confirm it is disposed
    // somewhere on shutdown.
    ArduinoSerialPort serialPort = new ArduinoSerialPort("COM5", 115200);
    serialPort.Open();
    bodyTracker = new KinectBodyTracker(sensor, serialPort);
    GestureBuilderForm test = new GestureBuilderForm(sensor);
    test.Show();
    // InitializeArduino();
}
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    // Render this sound event: children paint modifiers onto a private canvas
    // first; the audio itself is rendered only inside the event's time window.
    if (!Active)
    {
        return(false);
    }
    _rp = new RenderParams(parentRp);
    AdaptRenderParams(_rp);
    //Util.Log("Render HID=" + rp.HierarchyID + " ID=" + this.ID + "\n");

    // make a new canvas - anything painted on here by children, will
    // be used in this SoundEvent.
    RenderCanvas myCanvas = new RenderCanvas();

    // Render to rp/myCanvas, to retrieve adapted render-params based on possible
    // effects attached to me as child nodes.
    RenderChildren(_rp, myCanvas);

    // render my audio ... if within the right time
    if (_rp.Time >= 0 && _rp.Time < Duration)
    {
        _audio.Render(_rp, myCanvas, _dspList, _audioRepeats);
        // Propagate the time marker produced by the children up to the caller.
        canvas.TimeMarker = myCanvas.TimeMarker;
        return(true);
    }
    return(false);
}
FMOD.Channel PlayOsc(RenderParams rp, RenderCanvas canvas)
{
    // Start the oscillator DSP on a free FMOD channel. The channel is created
    // paused, configured (volume/pan, extra DSP units), then unpaused so all
    // settings take effect atomically. Returns null when playback fails.
    FMOD.Channel channel = null;
    FMOD.RESULT r = MusicEngine.AudioEngine.playDSP(FMOD.CHANNELINDEX.FREE, _dsp, true, ref channel);
    if (r == FMOD.RESULT.OK && channel != null)
    {
        // set regular play properties
        AdaptChannelSettings(channel, rp, canvas);

        // set optional DSP unit(s) on channel
        if (r == FMOD.RESULT.OK)
        {
            FMOD.DSPConnection conn = null;
            foreach (FMOD.DSP d in _dspList)
            {
                r = channel.addDSP(d, ref conn); // TODO errcheck
            }
            // go - start playing
            if (r == FMOD.RESULT.OK)
            {
                r = channel.setPaused(false);
            }
        }
    } // if
    return(channel);
}
private void ParseData()
{
    // Snapshot the current input text for the renderer.
    string text = InputData.Text;
    _data = text;

    // Mark the parse as in progress around the repaint request.
    _isParsing = true;
    RenderCanvas.Invalidate();
    _isParsing = false;
}
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    // Gate the attached DSP effect on this event's time window: bypass is
    // lifted only while 0 <= time < Duration, and re-engaged outside it.
    if (!Active)
    {
        return(false);
    }
    RenderParams rp = new RenderParams(parentRp);
    AdaptRenderParams(rp);
    RenderChildren(rp, canvas);
    // enable dsp effect only when in right time range
    if (_dsp != null)
    {
        if (rp.Time >= 0 && rp.Time < Duration)
        {
            _dsp.setBypass(false);
            return(true);
        }
        else
        {
            _dsp.setBypass(true);
        }
    }
    return(false);
}
private void OnStrokeColorChanged(Object sender, SelectionChangedEventArgs e)
{
    // Adopt the newly selected stroke color (ignoring "no selection"),
    // then repaint the canvas either way.
    int index = StrokeList.SelectedIndex;
    if (index >= 0)
    {
        _strokeColor = _colors[index];
    }
    RenderCanvas.Invalidate();
}
private void OnFillColorChanged(Object sender, SelectionChangedEventArgs e)
{
    // Adopt the newly selected fill color (ignoring "no selection"),
    // then repaint the canvas either way.
    int index = FillList.SelectedIndex;
    if (index >= 0)
    {
        _fillColor = _colors[index];
    }
    RenderCanvas.Invalidate();
}
/**
 * Adapt a running FMOD channel to the current render parameters combined
 * with the modifier results accumulated on the canvas (amplitude is scaled
 * and offset, pan is offset).
 */
internal void AdaptChannelSettings(FMOD.Channel channel, RenderParams rp, RenderCanvas canvas)
{
    float volume = (float)(rp.Ampl * canvas.AmplMultiply + canvas.AmplAdd);
    float pan = (float)(rp.Pan + canvas.Pan);
    channel.setVolume(volume);
    channel.setPan(pan);
}
/**
 * Render() entry point for any SoundEvent. Adapts the parent's render
 * parameters for this node and delegates to the children, which may 'draw'
 * their output (e.g. modifiers) onto the canvas.
 * Returns true if any effect was active at this time, false otherwise.
 * Child classes typically override Render() with their own implementation.
 */
public virtual bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    // Inactive events contribute nothing.
    if (!_isActive)
    {
        return false;
    }

    // Derive this node's parameters from the parent's, apply local adjustments.
    _rp = new RenderParams(parentRp);
    AdaptRenderParams(_rp);

    // Report whether any child was active.
    return RenderChildren(_rp, canvas);
}
public DeviceVisualizer(RenderCanvas renderCanvas, Device device) : base()
{
    // Bind this visualizer to a device and the shared render canvas.
    this.DoubleBuffered = true; // reduce flicker during repaints
    this.renderCanvas = renderCanvas;
    this.device = device;
    this.Id = device.Id;
    InitializeComponent();
    setupWithDevice();
    setupEvents();
}
public void SetRenderCanvas(RenderCanvas renderCanvas)
{
    // Attach the rendered spectrum visualizer to the given canvas (filling it
    // completely) and keep it synchronized with the preview visualizer.
    this.renderCanvas = renderCanvas;
    renderCanvas.BackColor = this.BackColor; //!important -> without this setup, the opacity from SpectrumVisualizer cannot work properly
    renderedSpectrumVisualizer.Size = renderCanvas.Size;
    renderedSpectrumVisualizer.Location = new Point(0, 0);
    renderCanvas.Controls.Add(renderedSpectrumVisualizer);
    renderedSpectrumVisualizer.SyncronizeTo = previewSpectrumVisualizer; //Synchronize the two ChromaVisualizer
}
private void RenderGL()
{
    // Nothing to draw until both a renderer and scene data are available.
    if (_renderer == null || _sceneData == null)
    {
        return;
    }

    // Draw, flush the GL pipeline, then present the back buffer.
    _renderer.Draw();
    GL.Flush();
    RenderCanvas.SwapBuffers();
}
private void Timer_Tick(object sender, EventArgs e)
{
    // Re-entrancy guard: skip this tick if a previous invalidation is still running.
    // Both the test AND the set happen under the lock — the original set the flag
    // outside the lock, leaving a window where two concurrent ticks could both
    // pass the check.
    lock (lockToken)
    {
        if (isExecute)
        {
            return;
        }
        isExecute = true;
    }

    try
    {
        RenderCanvas.Invalidate();
    }
    finally
    {
        // Always release the guard, even if Invalidate throws.
        lock (lockToken)
        {
            isExecute = false;
        }
    }
}
internal void RenderOsc(RenderParams rp, RenderCanvas canvas)
{
    // Drive the oscillator for one render pass. The FMOD channel for this
    // render hierarchy is tracked in nowPlayingList keyed by HierarchyID:
    // a playing channel is adapted or stopped; a missing one is started.
    //Util.Log("RenOsc HID=" + rp.HierarchyID + " T=" + Math.Round(rp.Time, 3) + " AbsT=" + Math.Round(rp.AbsTime, 3) + " A=" + Math.Round(rp.Ampl, 3) + "\n");
    bool wasPlaying = nowPlayingList.ContainsKey(rp.HierarchyID);
    FMOD.Channel channel = null;
    FMOD.RESULT r;
    if (wasPlaying)
    {
        channel = nowPlayingList[rp.HierarchyID];
        // check if still playing now
        bool isPlayingNow = false;
        r = channel.isPlaying(ref isPlayingNow);
        if (isPlayingNow)
        {
            // check if should be stopped now
            if (rp.Time >= Duration)
            {
                channel.stop();
                nowPlayingList.Remove(rp.HierarchyID);
            }
            else
            {
                // if still validly playing, adapt channel properties only.
                AdaptChannelSettings(channel, rp, canvas);
                //Util.Log("   rp.A=" + Math.Round(rp.Ampl, 3) + " canv.A=" + canvas.AmplMultiply + "\n");
            }
        }
        else
        {
            // if not anymore, remove from list
            nowPlayingList.Remove(rp.HierarchyID);
        }
    }
    else
    {
        // was not playing but should be rendered - hence, initiate playing now
        if (rp.Time < Duration - 0.100) // extra safety margin - do not start if close to end.
        {
            channel = PlayOsc(rp, canvas);
            if (channel != null)
            {
                // store playing sound in the table
                nowPlayingList[rp.HierarchyID] = channel;
            }
            else
            {
                // PlayOsc failed — nothing to track; it will be retried next pass.
            }
        }
    }
}
/// <summary>
/// Initialize the layer: build the marker template, create the preview
/// canvas (initially hidden) and add the child canvases.
/// </summary>
protected override void init()
{
    CreateTemplate(MarkerCanvas, null);
    //CreateTemplate(AreaCanvas, Areas);
    PreViewCanvas = new RenderCanvas(ShowList, ShowListExpand, this);
    // this.Children.Add(AreaCanvas);
    this.Children.Add(MarkerCanvas);
    this.Children.Add(PreViewCanvas);
    PreViewCanvas.Visibility = System.Windows.Visibility.Collapsed;
    //mThread = new Thread(checkLife);
    //mThread.IsBackground = true;
    //mThread.Start();
}
private void ShowSample(int index)
{
    // Load the index-th sample into the input box and repaint, applying
    // default styling if the user has not changed the style controls yet.
    InputData.Text = _samples.ElementAt(index);
    if (!_selectionChanged)
    {
        // NOTE(review): setting these values fires the change handlers, which
        // presumably set _selectionChanged; it is reset to false afterwards so
        // programmatic defaults do not count as a user change — confirm.
        StrokeThickness.Value = 4;
        StrokeList.SelectedIndex = 1;
        FillList.SelectedIndex = 5;
        _selectionChanged = false;
    }
    _data = InputData.Text;
    RenderCanvas.Invalidate();
}
private void renderCanvas_Paint(object sender, System.Windows.Forms.PaintEventArgs e)
{
    // Repaint the GL scene: draw, flush the pipeline, then present the back buffer.
    // (Removed an unused per-call Random and the dead vertex-jitter experiment it fed.)
    renderer.Draw();
    GL.Flush();
    RenderCanvas.SwapBuffers();
}
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    // Let the children paint onto a private canvas, then render the oscillator
    // if within the event's time window (extended by a safety margin).
    if (!Active)
        return false;
    RenderParams rp = new RenderParams(parentRp);
    AdaptRenderParams(rp);
    RenderCanvas myCanvas = new RenderCanvas();
    RenderChildren(rp, myCanvas);
    // render my audio or signal or ... if within the right time
    if (rp.Time >= 0 && (rp.Time < (Duration + RENDER_SAFETY_MARGIN_SEC)) && _dsp != null)
    {
        RenderOsc(rp, myCanvas);
        return true;
    }
    return false;
}
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    // Evaluate the signal at the current time and apply it according to the
    // configured modifier: forward to a parent DSP/oscillator parameter, or
    // scale/offset the canvas amplitude, or offset the pan.
    if (!Active)
    {
        return(false); // skip if not active
    }
    RenderParams rp = new RenderParams(parentRp);
    AdaptRenderParams(rp);
    RenderChildren(rp, canvas);
    if (rp.Time >= 0 && rp.Time < Duration && _sig != null)
    {
        double s = _sig.Render(rp.Time);
        switch (_modif)
        {
        case Modifier.DSP_PARAM:
            if (_parentsDSP != null)
            {
                _parentsDSP.SetParameter(_paramNumber, s);
            }
            if (_parentsOsc != null)
            {
                _parentsOsc.SetParameter(_paramNumber, s);
            }
            break;

        case Modifier.NONE:
            break;

        case Modifier.AMPLITUDE:
            canvas.AmplMultiply *= s;
            break;

        case Modifier.AMPLITUDE_ADDITIVE:
            // NOTE(review): adds to the multiplier (not AmplAdd) — confirm intended.
            canvas.AmplMultiply += s;
            break;

        case Modifier.PAN:
            canvas.Pan += s;
            break;
        }
        return(true);
    }
    return(false);
} // end method
private void ChangeMyDataLabel(RoutedEventArgs e)
{
    // Hide the data label when nothing is selected; otherwise show the
    // selected item's text positioned next to the clicked chart element.
    if (this.flexChart.SelectedIndex < 0)
    {
        this.MyDataLabel.Visibility = Visibility.Collapsed;
        this.MyDataLabel.Text = "";
    }
    else
    {
        this.MyDataLabel.Visibility = Visibility.Visible;
        this.MyDataLabel.Text = this.DataList[this.flexChart.SelectedIndex];
        // NOTE(review): the `as` casts below assume OriginalSource is a
        // FrameworkElement; a non-element source would NRE on .Width — confirm.
        object selectedItem = e.OriginalSource;
        double left = RenderCanvas.GetLeft(selectedItem as UIElement);
        double top = RenderCanvas.GetTop(selectedItem as UIElement);
        double width = (selectedItem as FrameworkElement).Width;
        // Place the label just to the right of the selected element.
        Canvas.SetLeft(this.MyDataLabel, left + width + 5);
        Canvas.SetTop(this.MyDataLabel, top);
    }
}
/**
 * called internally from Render() method, by any event that needs/wants to render its child events.
 * Variant that picks ONE child at random and keeps it selected across calls
 * until it reports inactive or its time window has passed.
 */
internal override bool RenderChildren(RenderParams rp, RenderCanvas canvas)
{
    if (_children.Count() == 0)
    {
        return(false);
    }
    // check if a child for rendering is already selected: if not, select one
    if (_childSelected == null)
    {
        // select one child effect random or in sequence, and see if it has to be played now
        double myRpTime = rp.Time;
        // NOTE(review): a new Random per call can repeat values when called in
        // quick succession on older runtimes — consider a shared instance.
        Random rnd = new Random();
        int idx = rnd.Next(_children.Count());
        KeyValuePair <double, List <SoundEvent> > kvPair = _children.ElementAt(idx);
        List <SoundEvent> evs = kvPair.Value;
        idx = rnd.Next(evs.Count());
        _childSelected = evs.ElementAt(idx);
    }
    SoundEvent ev = _childSelected;
    // check if we are in the time range where the effect can work
    double evEndTime = 0.0 + ev.Duration; ///_timeSpeedupFactor ;
    if (evEndTime + RENDER_SAFETY_MARGIN_SEC > rp.Time) // if end time lies in the future...
    {
        // --render the child effect, shifted in time/pan/amplitude by use of rp.
        RenderParams rpChild = new RenderParams(rp);
        rpChild.Time = (rp.Time - 0.0); // only time is set for each child separately. Rest is same.
        bool wasActive = ev.Render(rpChild, canvas);
        if (!wasActive)
        {
            _childSelected = null; // reset back - next time, another event may be selected into _childSelected
        }
        return(wasActive);
    }
    else
    {
        _childSelected = null; // reset back - next time, another event may be selected into _childSelected
    }
    return(false);
} // end method
public GestureBuilderForm(SmartKinectSensor sensor)
{
    // Build a gesture-recording form around the shared sensor: the skeleton
    // renderer feeds a 10-second replay canvas whose events drive the UI.
    InitializeComponent();
    this.sensor = sensor;
    this.skeletonRenderer = new SkeletonRenderer(this.sensor.Sensor);
    this.timeSpan = TimeSpan.FromSeconds(10);
    this.renderCanvas = new RenderCanvas(timeSpan);
    this.skeletonRenderer.SkeletonRendered += renderCanvas.SkeletonFrameCaptured;
    this.renderCanvas.ImageRendered += new ImagingUtilities.ImageRenderedEventHandler(renderCanvas_ImageRendered);
    this.renderCanvas.ReplayCanvasComplete += new ImagingUtilities.ImageRenderingCompleteEventHandler(renderCanvas_ReplayCanvasComplete);
    this.sensor.SkeletonController.AddFunction(this.skeletonRenderer);
    // Start in recording mode.
    this.recording = true;
    this.replaying = false;
    InitializeFormControls();
}
/**
 * called internally from Render() method, by any event that needs/wants to render its child events.
 * Iterates children ordered by start time and renders every child whose time
 * window overlaps the current render time (plus look-ahead / safety margin).
 */
internal virtual bool RenderChildren(RenderParams rp, RenderCanvas canvas)
{
    if (_children.Count() == 0)
    {
        return(false);
    }
    // loop all child effects and see if they have to be played now
    bool wasActive = false;
    foreach (KeyValuePair <double, List <SoundEvent> > pair in _children)
    {
        double evStartTime = pair.Key;

        // check if effect lies in the future. In this case, we can break now. All further child effects
        // will be even later in time so we do not need to iterate these further
        if (evStartTime > rp.Time + rp.RenderAheadTime)
        {
            break;
        }

        // loop all events at that specific time 't'
        foreach (SoundEvent ev in pair.Value)
        {
            // check if we are in the time range where the effect can work
            double evEndTime = evStartTime + ev.Duration; ///_timeSpeedupFactor ;
            if (evEndTime + RENDER_SAFETY_MARGIN_SEC > rp.Time) // if end time lies in the future...
            {
                // --render the child effect, shifted in time/pan/amplitude by use of rp.
                RenderParams rpChild = new RenderParams(rp);
                rpChild.Time = (rp.Time - evStartTime); // only time is set for each child separately. Rest is same.
                bool wasChildActive = ev.Render(rpChild, canvas);
                if (wasChildActive)
                {
                    wasActive = true;
                }
            }
        }
    } // foreach over children
    return(wasActive);
} // end method
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    // Let the children paint onto a private canvas, then render the oscillator
    // if within the event's time window (extended by a safety margin).
    if (!Active)
    {
        return(false);
    }
    RenderParams rp = new RenderParams(parentRp);
    AdaptRenderParams(rp);
    RenderCanvas myCanvas = new RenderCanvas();
    RenderChildren(rp, myCanvas);
    // render my audio or signal or ... if within the right time
    if (rp.Time >= 0 && (rp.Time < (Duration + RENDER_SAFETY_MARGIN_SEC)) && _dsp != null)
    {
        RenderOsc(rp, myCanvas);
        return(true);
    }
    return(false);
}
/**
 * called internally from Render() method, by any event that needs/wants to render its child events.
 * Variant that picks ONE child at random and keeps it selected across calls
 * until it reports inactive or its time window has passed.
 */
internal override bool RenderChildren(RenderParams rp, RenderCanvas canvas)
{
    if (_children.Count() == 0)
        return false;

    // check if a child for rendering is already selected: if not, select one
    if (_childSelected == null)
    {
        // select one child effect random or in sequence, and see if it has to be played now
        double myRpTime = rp.Time;
        // NOTE(review): new Random per call — consider a shared instance.
        Random rnd = new Random();
        int idx = rnd.Next(_children.Count());
        KeyValuePair<double, List<SoundEvent>> kvPair = _children.ElementAt(idx);
        List<SoundEvent> evs = kvPair.Value;
        idx = rnd.Next(evs.Count());
        _childSelected = evs.ElementAt(idx);
    }
    SoundEvent ev = _childSelected;

    // check if we are in the time range where the effect can work
    double evEndTime = 0.0 + ev.Duration; ///_timeSpeedupFactor ;
    if (evEndTime + RENDER_SAFETY_MARGIN_SEC > rp.Time) // if end time lies in the future...
    {
        // --render the child effect, shifted in time/pan/amplitude by use of rp.
        RenderParams rpChild = new RenderParams(rp);
        rpChild.Time = (rp.Time - 0.0); // only time is set for each child separately. Rest is same.
        bool wasActive= ev.Render(rpChild, canvas);
        if (!wasActive)
        {
            _childSelected = null; // reset back - next time, another event may be selected into _childSelected
        }
        return wasActive;
    }
    else
    {
        _childSelected = null; // reset back - next time, another event may be selected into _childSelected
    }
    return false;
}
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    // Render the children, then gate the attached DSP effect on this event's
    // time window: the effect is un-bypassed only while 0 <= time < Duration.
    if (!Active)
    {
        return false;
    }

    RenderParams rp = new RenderParams(parentRp);
    AdaptRenderParams(rp);
    RenderChildren(rp, canvas);

    if (_dsp == null)
    {
        return false;
    }

    bool inTimeRange = rp.Time >= 0 && rp.Time < Duration;
    _dsp.setBypass(!inTimeRange);
    return inTimeRange;
}
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    // Evaluate the modifier signal at the current time and apply it: forward
    // to a parent DSP/oscillator parameter, or adjust canvas amplitude/pan.
    if (!Active)
    {
        return false; // inactive events contribute nothing
    }

    RenderParams rp = new RenderParams(parentRp);
    AdaptRenderParams(rp);
    RenderChildren(rp, canvas);

    // Outside the time window, or with no signal, there is nothing to apply.
    if (rp.Time < 0 || rp.Time >= Duration || _sig == null)
    {
        return false;
    }

    double value = _sig.Render(rp.Time);
    switch (_modif)
    {
    case Modifier.DSP_PARAM:
        // Forward the sampled value to whichever parent is attached.
        if (_parentsDSP != null)
        {
            _parentsDSP.SetParameter(_paramNumber, value);
        }
        if (_parentsOsc != null)
        {
            _parentsOsc.SetParameter(_paramNumber, value);
        }
        break;

    case Modifier.NONE:
        break;

    case Modifier.AMPLITUDE:
        canvas.AmplMultiply *= value;
        break;

    case Modifier.AMPLITUDE_ADDITIVE:
        canvas.AmplMultiply += value;
        break;

    case Modifier.PAN:
        canvas.Pan += value;
        break;
    }
    return true;
}
FMOD.Channel PlaySample(RenderParams rp, RenderCanvas canvas, List <FMOD.DSP> dspList)
{
    // Start this sample on a free FMOD channel. The channel is created paused,
    // configured (volume/pan, seek position, extra DSP units), then unpaused
    // so all settings take effect atomically. Returns null on failure.
    FMOD.Channel channel = null;
    FMOD.RESULT r = MusicEngine.AudioEngine.playSound(FMOD.CHANNELINDEX.FREE, _sound, true, ref channel);
    if (r == FMOD.RESULT.OK && channel != null)
    {
        // set regular play properties
        AdaptChannelSettings(channel, rp, canvas);

        // set play position
        uint tMs = (uint)Math.Round(rp.Time * 1000.0);
        if (tMs > 0)
        {
            // only set position if there is a need
            r = channel.setPosition(tMs, FMOD.TIMEUNIT.MS);
#if DEBUG
            Util.Log(" setPos(" + tMs + ")");
#endif
        }

        // set optional DSP unit(s) on channel
        if (r == FMOD.RESULT.OK)
        {
            FMOD.DSPConnection conn = null;
            foreach (FMOD.DSP d in dspList)
            {
                r = channel.addDSP(d, ref conn); // TODO errcheck
            }
            // go - start playing
            if (r == FMOD.RESULT.OK)
            {
                r = channel.setPaused(false);
            }
        }
    } // if
    Util.ERRCHECK(r);
    return(channel);
}
private void renderControl_HandleCreated(object sender, EventArgs e)
{
    //
    // STEP TWO - Now the underlying Windows Window was created - we can hook OpenGL on it.
    //
    // Take this as an example how to hook up any FUSEE application on a given Winforms form:

    // First create a WinformsHost around the control
    _currentHost = new WinformsHost(_currentControl, this);

    // Then instantiate your app (could be as well _currentApp = new MyOwnRenderCanvasDerivedClass(); )
    _currentApp = _appFinder.Instantiate(_currentInx);

    // Now use the host as the canvas AND the input implementation of your App
    _currentApp.CanvasImplementor = _currentHost;
    _currentApp.InputImplementor = _currentHost;

    // Then you can run the app
    _currentApp.Run();

    // If not already done, show the window.
    _currentControl.Show();
}
FMOD.Channel PlayOsc(RenderParams rp, RenderCanvas canvas)
{
    // Start the oscillator DSP on a free FMOD channel. The channel is created
    // paused, configured (volume/pan, extra DSP units), then unpaused so all
    // settings take effect atomically. Returns null when playback fails.
    FMOD.Channel channel = null;
    FMOD.RESULT r = MusicEngine.AudioEngine.playDSP(FMOD.CHANNELINDEX.FREE, _dsp, true, ref channel);
    if (r == FMOD.RESULT.OK && channel != null)
    {
        // set regular play properties
        AdaptChannelSettings(channel, rp, canvas);

        // set optional DSP unit(s) on channel
        if (r == FMOD.RESULT.OK)
        {
            FMOD.DSPConnection conn = null;
            foreach (FMOD.DSP d in _dspList)
            {
                r = channel.addDSP(d, ref conn); // TODO errcheck
            }
            // go - start playing
            if (r == FMOD.RESULT.OK)
            {
                r = channel.setPaused(false);
            }
        }
    } // if
    return channel;
}
private void OnStrokeThicknessChanged(Object sender, RangeBaseValueChangedEventArgs e)
{
    // Record the new thickness from the slider and trigger a repaint.
    var thickness = StrokeThickness.Value.ToSingle();
    _strokeThickness = thickness;
    RenderCanvas.Invalidate();
}
/**
 * Render() entry point for any SoundEvent. Adapts the parent's render
 * parameters for this node and delegates to the children, which may 'draw'
 * their output (e.g. modifiers) onto the canvas.
 * Returns true if any effect was active at this time, false otherwise.
 * Child classes typically override Render() with their own implementation.
 */
public virtual bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    // Inactive events render nothing at all.
    if (!_isActive)
    {
        return false;
    }

    // Copy and adapt the parent's parameters, then pass them down the tree.
    _rp = new RenderParams(parentRp);
    AdaptRenderParams(_rp);
    return RenderChildren(_rp, canvas);
}
/**
 * called internally from Render() method, by any event that needs/wants to render its child events.
 * Iterates children ordered by start time and renders every child whose time
 * window overlaps the current render time (plus look-ahead / safety margin).
 */
internal virtual bool RenderChildren(RenderParams rp, RenderCanvas canvas)
{
    if (_children.Count() == 0)
        return false;

    // loop all child effects and see if they have to be played now
    bool wasActive = false;
    foreach (KeyValuePair<double, List<SoundEvent>> pair in _children)
    {
        double evStartTime = pair.Key ;

        // check if effect lies in the future. In this case, we can break now. All further child effects
        // will be even later in time so we do not need to iterate these further
        if (evStartTime > rp.Time + rp.RenderAheadTime )
            break;

        // loop all events at that specific time 't'
        foreach (SoundEvent ev in pair.Value)
        {
            // check if we are in the time range where the effect can work
            double evEndTime = evStartTime + ev.Duration; ///_timeSpeedupFactor ;
            if (evEndTime + RENDER_SAFETY_MARGIN_SEC > rp.Time ) // if end time lies in the future...
            {
                // --render the child effect, shifted in time/pan/amplitude by use of rp.
                RenderParams rpChild = new RenderParams(rp);
                rpChild.Time = (rp.Time - evStartTime); // only time is set for each child separately. Rest is same.
                bool wasChildActive = ev.Render(rpChild, canvas);
                if (wasChildActive)
                    wasActive = true;
            }
        }
    }// foreach over children
    return wasActive;
}
private void WindowsFormsHost_Initialized(object sender, EventArgs e)
{
    // Make the hosted canvas's GL context current on this thread before any
    // rendering happens.
    RenderCanvas.MakeCurrent();
}
public static void Main(string[] args)
{
    // Entry point: resolve which FUSEE app type to run and which model file /
    // asset directories to use, register asset decoders, wire up the platform
    // implementations, then run the app.

    // Inject Fusee.Engine.Base InjectMe dependencies
    IO.IOImp = new Fusee.Base.Imp.Desktop.IOImp();

    Type tApp = null;
    string modelFile = null;
    List <string> assetDirs = new List <string>();
    TryAddDir(assetDirs, "Assets");
    string ExeDir = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
    string Cwd = Directory.GetCurrentDirectory();
    if (Cwd != ExeDir)
    {
        TryAddDir(assetDirs, Path.Combine(ExeDir, "Assets"));
    }
    if (args.Length >= 1)
    {
        if (File.Exists(args[0]))
        {
            TryAddDir(assetDirs, Path.GetDirectoryName(args[0]));
            if (Path.GetExtension(args[0]).ToLower().Contains("fus"))
            {
                // A .fus file - open it.
                modelFile = Path.GetFileName(args[0]);
            }
            else
            {
                // See if the passed argument is an entire Fusee App DLL
                try
                {
                    Assembly asm = Assembly.LoadFrom(args[0]);
                    tApp = asm.GetTypes().FirstOrDefault(t => typeof(RenderCanvas).IsAssignableFrom(t));
                    TryAddDir(assetDirs, Path.Combine(Path.GetDirectoryName(args[0]), "Assets"));
                }
                catch (Exception e)
                {
                    Diagnostics.Log(e.ToString());
                }
            }
        }
        else
        {
            Diagnostics.Log($"Cannot open {args[0]}.");
        }
    }
    if (tApp == null)
    {
        // See if we are in "Deployed mode". That is: A Fusee.App.dll is lying next to us.
        try
        {
            Assembly asm = Assembly.LoadFrom(Path.Combine(ExeDir, "Fusee.App.dll"));
            tApp = asm.GetTypes().FirstOrDefault(t => typeof(RenderCanvas).IsAssignableFrom(t));
        }
        catch (Exception e)
        {
            Diagnostics.Log(e.ToString());
        }
        // No App was specified and we're not in Deployed mode. Simply use the default App (== Viewer)
        if (tApp == null)
        {
            tApp = typeof(Fusee.Engine.Player.Core.Player);
        }
    }
    var fap = new Fusee.Base.Imp.Desktop.FileAssetProvider(assetDirs);
    // Font handler: decodes .ttf assets only.
    fap.RegisterTypeHandler(
        new AssetHandler
    {
        ReturnedType = typeof(Font),
        Decoder = delegate(string id, object storage)
        {
            if (!Path.GetExtension(id).ToLower().Contains("ttf"))
            {
                return(null);
            }
            return(new Font {
                _fontImp = new FontImp((Stream)storage)
            });
        },
        Checker = id => Path.GetExtension(id).ToLower().Contains("ttf")
    });
    // Scene handler: deserializes .fus files and converts them to the runtime scene graph.
    fap.RegisterTypeHandler(
        new AssetHandler
    {
        ReturnedType = typeof(SceneContainer),
        Decoder = delegate(string id, object storage)
        {
            if (!Path.GetExtension(id).ToLower().Contains("fus"))
            {
                return(null);
            }
            var ser = new Serializer();
            var scene = ser.Deserialize((Stream)storage, null, typeof(SceneContainer));
            var container = scene as SceneContainer;
            return(new ConvertSceneGraph().Convert(container));
        },
        Checker = id => Path.GetExtension(id).ToLower().Contains("fus")
    });
    AssetStorage.RegisterProvider(fap);

    // Dynamically instantiate the app because it might live in some external (.NET core) DLL.
    var ctor = tApp.GetConstructor(Type.EmptyTypes);
    if (ctor == null)
    {
        Diagnostics.Log($"Cannot instantiate FUSEE App. {tApp.Name} contains no default constructor");
    }
    else
    {
        // invoke the first public constructor with no parameters.
        RenderCanvas app = (RenderCanvas)ctor.Invoke(new object[] { });
        if (!string.IsNullOrEmpty(modelFile) && app is Fusee.Engine.Player.Core.Player)
        {
            ((Fusee.Engine.Player.Core.Player)app).ModelFile = modelFile;
        }

        // Inject Fusee.Engine InjectMe dependencies (hard coded)
        System.Drawing.Icon appIcon = System.Drawing.Icon.ExtractAssociatedIcon(Assembly.GetExecutingAssembly().Location);
        app.CanvasImplementor = new Fusee.Engine.Imp.Graphics.Desktop.RenderCanvasImp(appIcon);
        app.ContextImplementor = new Fusee.Engine.Imp.Graphics.Desktop.RenderContextImp(app.CanvasImplementor);
        Input.AddDriverImp(new Fusee.Engine.Imp.Graphics.Desktop.RenderCanvasInputDriverImp(app.CanvasImplementor));
        Input.AddDriverImp(new Fusee.Engine.Imp.Graphics.Desktop.WindowsSpaceMouseDriverImp(app.CanvasImplementor));
        Input.AddDriverImp(new Fusee.Engine.Imp.Graphics.Desktop.WindowsTouchInputDriverImp(app.CanvasImplementor));
        // app.InputImplementor = new Fusee.Engine.Imp.Graphics.Desktop.InputImp(app.CanvasImplementor);
        // app.AudioImplementor = new Fusee.Engine.Imp.Sound.Desktop.AudioImp();
        // app.NetworkImplementor = new Fusee.Engine.Imp.Network.Desktop.NetworkImp();
        // app.InputDriverImplementor = new Fusee.Engine.Imp.Input.Desktop.InputDriverImp();
        // app.VideoManagerImplementor = ImpFactory.CreateIVideoManagerImp();

        // Start the app
        app.Run();
    }
}
private void ParseData()
{
    // Capture the latest input text and request a repaint of the canvas.
    string text = InputData.Text;
    _data = text;
    RenderCanvas.Invalidate();
}
private void OnStrokeThicknessChanged(object sender, RangeBaseValueChangedEventArgs e)
{
    // Record the new thickness, mark that the user changed a style control,
    // and trigger a repaint.
    double sliderValue = StrokeThickness.Value;
    _strokeThickness = (float)sliderValue;
    _selectionChanged = true;
    RenderCanvas.Invalidate();
}