/// <summary>
/// Draws this entity into the shadow pass. Wire entities cast no shadow and are
/// skipped; every other nature defers to the base implementation.
/// </summary>
protected override void DrawForShadow(RenderParams renderParams)
{
    if (entityNature == entityNatureType.Wire)
    {
        return; // wires are excluded from shadow rendering
    }
    base.DrawForShadow(renderParams);
}
/// <summary>
/// Requests a rendered image for the given parameters.
/// If a request with the same callback token is already pending, the callback is
/// appended to that request's completion list. If the image is already cached,
/// the callback is invoked synchronously. Otherwise the request is enqueued and,
/// when it is the only queued request, the processing coroutine is started.
/// </summary>
/// <param name="renderParams">Parameters identifying the image to render.</param>
/// <param name="callback">Invoked with the resulting texture when available.</param>
/// <returns>true when the callback was satisfied synchronously from the cache; false otherwise.</returns>
protected bool RequestImage(RenderParams renderParams, RequestImageCallback callback)
{
    CallbackToken callbackToken = renderParams.CallbackToken;
    // Single TryGetValue lookup instead of ContainsKey + indexer (double lookup).
    List<RequestImageCallback> pendingCallbacks;
    if (completionCallbacks.TryGetValue(callbackToken, out pendingCallbacks))
    {
        pendingCallbacks.Add(callback);
        return false;
    }
    if (imageCache.ContainsImage(renderParams.ImageHash))
    {
        Texture2D textureFromCache = imageCache.GetTextureFromCache(renderParams.ImageHash);
        callback(success: true, textureFromCache, callbackToken);
        return true;
    }
    List<RequestImageCallback> list = new List<RequestImageCallback> { callback };
    completionCallbacks.Add(callbackToken, list);
    renderQueue.Enqueue(renderParams);
    if (renderQueue.Count == 1)
    {
        // Queue was empty before this request: kick off the worker coroutine.
        CoroutineRunner.Start(setupAndProcessRequests(), this, "ProcessNextRequest");
    }
    return false;
}
/// <summary>
/// Renders this sound event: adapts the inherited render parameters, renders
/// child events onto a private canvas, then renders the audio itself when the
/// adapted time falls within [0, Duration).
/// </summary>
/// <param name="parentRp">Render parameters inherited from the parent event.</param>
/// <param name="canvas">Canvas receiving this event's output (time marker).</param>
/// <returns>true if audio was rendered at this time; false if inactive or out of range.</returns>
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    if (!Active)
    {
        return (false);
    }
    _rp = new RenderParams(parentRp);
    AdaptRenderParams(_rp);
    //Util.Log("Render HID=" + rp.HierarchyID + " ID=" + this.ID + "\n");
    // make a new canvas - anything painted on here by children, will
    // be used in this SOundEvent.
    RenderCanvas myCanvas = new RenderCanvas();
    // Render to rp/myCanvas, to retrieve adapted render-params based on possible
    // effects attached to me as child nodes.
    RenderChildren(_rp, myCanvas);
    // render my audio ... if within the right time
    if (_rp.Time >= 0 && _rp.Time < Duration)
    {
        _audio.Render(_rp, myCanvas, _dspList, _audioRepeats);
        // propagate the child canvas's time marker back to the parent canvas
        canvas.TimeMarker = myCanvas.TimeMarker;
        return (true);
    }
    return (false);
}
/// <summary>
/// Coroutine that renders an avatar item icon: applies the requested outfit and
/// body color to the penguin avatar model, waits until the avatar view reports
/// ready, then renders to a texture. Renders nothing when the penguin preload
/// failed or the outfit cannot be applied.
/// </summary>
protected override IEnumerator processRequest(RenderParams renderParam)
{
    ItemRenderParams param = renderParam as ItemRenderParams;
    if (penguinPreload != LoadingTriState.FAILURE)
    {
        // Color.clear acts as "unset": fall back to the default body color.
        Color bodyColor = ((param.bodyColor != Color.clear) ? param.bodyColor : defaultBodyColor);
        penguin.transform.localPosition = new Vector3(10f, 0f, 0f);
        avatarModel.ClearAllEquipment();
        try
        {
            avatarModel.ApplyOutfit(createOutfit(param.equipment));
        }
        catch (Exception ex)
        {
            // Abort the coroutine entirely - the icon is not rendered.
            Log.LogErrorFormatted(this, "When applying an outfit to the avatar model an error occurred. Icon not rendered. Message: {0}", ex);
            yield break;
        }
        avatarModel.BeakColor = bodyColor;
        avatarModel.BellyColor = bodyColor;
        avatarModel.BodyColor = bodyColor;
        // Wait until the avatar view has finished loading/initializing.
        while (!avatarView.IsReady)
        {
            yield return (null);
        }
        LightCullingMaskHelper.SetLayerIncludingChildren(penguin.transform, "IconRender");
        modifyMaterials(bodyColor);
        yield return (renderToTexture(param));
    }
}
/// <summary>
/// Renders one screen (all of its layers, then the physics layer, an optional
/// border, and an optional game-specific overlay) onto the supplied Graphics.
/// </summary>
/// <param name="g">Target graphics context (caller owns its lifetime).</param>
/// <param name="screens">All screens; only screens[scrNo] is drawn.</param>
/// <param name="scrNo">Index of the screen to draw.</param>
/// <param name="renderParams">Scale, margins, block data and view options.</param>
public static void render(Graphics g, Screen[] screens, int scrNo, RenderParams renderParams)
{
    // Pixel-exact tile blitting: no smoothing, half-pixel offset.
    g.InterpolationMode = System.Drawing.Drawing2D.InterpolationMode.NearestNeighbor;
    g.PixelOffsetMode = System.Drawing.Drawing2D.PixelOffsetMode.Half;
    var curScreen = screens[scrNo];
    for (int layerIndex = 0; layerIndex < curScreen.layers.Length; layerIndex++)
    {
        var layer = curScreen.layers[layerIndex];
        renderLayer(g, layer, renderParams);
    }
    // Physics layer uses a dedicated block-render function.
    renderLayer(g, curScreen.physicsLayer, new RenderParams(renderParams)
    {
        renderBlockFunc = renderPhysicsBlock
    });
    if (renderParams.showBorder)
    {
        int tileSizeX = (int)(renderParams.bigBlocks[0].Width * renderParams.curScale);
        int tileSizeY = (int)(renderParams.bigBlocks[0].Height * renderParams.curScale);
        // Pen wraps a GDI+ handle (IDisposable) - dispose it instead of leaking.
        using (var borderPen = new Pen(Color.Green, 4.0f))
        {
            g.DrawRectangle(borderPen, new Rectangle(tileSizeX, 0, tileSizeX * renderParams.width, tileSizeY * renderParams.height));
        }
    }
    //Additional rendering
    //float to int!
    if (renderParams.additionalRenderEnabled)
    {
        ConfigScript.renderToMainScreen(g, (int)renderParams.curScale, scrNo);
    }
}
/// <summary>
/// Render the surface to screen.
/// </summary>
/// <param name="rp">Rendering parameters to use</param>
public void Render(RenderParams rp)
{
    // Only render if surface is enabled
    if (!this.IsEnabled)
    {
        return;
    }
    // Activate material and feed it the current render parameters
    this.Material.Use();
    this.Material.ApplyParameters(rp);
    // Upload GPU data if needed (dirty flag is cleared before the upload)
    if (this.IsDirty)
    {
        this.IsDirty = false;
        this.BufferTexture.Upload(this.Data);
    }
    // Activate empty buffers
    this.EmptyBuffers.Use();
    // Activate all textures on units 0-2
    this.TilesetTexture.Use(TextureUnit.Texture0);
    this.ShadowTexture.Use(TextureUnit.Texture1);
    this.BufferTexture.Use(TextureUnit.Texture2);
    // Render instanced quad (6 vertices) once for each tile on the surface
    GL.DrawArraysInstanced(PrimitiveType.Triangles, 0, 6, this.Dimensions.Width * this.Dimensions.Height);
}
/*
 * Starts this oscillator's DSP on a free FMOD channel in paused state, applies
 * volume/pan from the render params and canvas, attaches any extra DSP units,
 * then unpauses. Returns the playing channel, or null when playback failed.
 */
FMOD.Channel PlayOsc(RenderParams rp, RenderCanvas canvas)
{
    FMOD.Channel channel = null;
    // Start paused (third arg true) so settings can be applied before sound is heard.
    FMOD.RESULT r = MusicEngine.AudioEngine.playDSP(FMOD.CHANNELINDEX.FREE, _dsp, true, ref channel);
    if (r == FMOD.RESULT.OK && channel != null)
    {
        // set regular play properties
        AdaptChannelSettings(channel, rp, canvas);
        // set optional DSP unit(s) on channel
        if (r == FMOD.RESULT.OK)
        {
            FMOD.DSPConnection conn = null;
            foreach (FMOD.DSP d in _dspList)
            {
                r = channel.addDSP(d, ref conn); // TODO errcheck
            }
            // go - start playing
            if (r == FMOD.RESULT.OK)
            {
                r = channel.setPaused(false);
            }
        }
    } // if
    return (channel);
}
/// <summary>
/// Renders this DSP effect event: adapts render params, renders children, and
/// toggles the DSP bypass so the effect is audible only inside [0, Duration).
/// </summary>
/// <returns>true when the effect is active at the adapted time; false otherwise.</returns>
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    if (!Active)
    {
        return false;
    }

    RenderParams localParams = new RenderParams(parentRp);
    AdaptRenderParams(localParams);
    RenderChildren(localParams, canvas);

    if (_dsp == null)
    {
        return false;
    }

    // Enable the effect only while inside the event's time window.
    bool inTimeRange = localParams.Time >= 0 && localParams.Time < Duration;
    _dsp.setBypass(!inTimeRange);
    return inTimeRange;
}
/// <summary>
/// Creates a run command with empty arguments, the given parameter rendering
/// style (slash form by default), and the process console out/error streams.
/// </summary>
public RunCommand(RenderParams renderParams = RenderParams.AsSlash)
{
    Arguments = string.Empty;
    _params = new ParameterDictionary();
    _renderParams = renderParams;
    // Default to the console streams; callers may replace these.
    Out = Console.Out;
    Error = Console.Error;
}
/// <summary>
/// Render active scene on the stack
/// </summary>
/// <param name="rp">Render parameters to use</param>
public void Render(RenderParams rp)
{
    // Nothing to draw when no scene has been pushed.
    if (this.Scenes.Count == 0)
    {
        return;
    }
    this.Scenes.Peek().Render(rp);
}
/**
 * adapt a running sample according to renderparams and canvas result:
 * combine the event amplitude with the canvas's multiplicative and additive
 * contributions, then push the resulting volume and pan to the channel.
 */
internal void AdaptChannelSettings(FMOD.Channel channel, RenderParams rp, RenderCanvas canvas)
{
    double volume = canvas.AmplAdd + rp.Ampl * canvas.AmplMultiply;
    double panning = canvas.Pan + rp.Pan;
    channel.setVolume((float)volume);
    channel.setPan((float)panning);
}
/// <summary>
/// Convenience overload: renders using the current frame's delta time.
/// </summary>
public void Render(RenderTexture next, Texture prev, Texture refTex, RenderParams rparams)
{
    Render(next, prev, refTex, rparams, Time.deltaTime);
}
/// <summary>
/// Looks up a render parameter by name (case-insensitive) and returns its value
/// cast to T, or default(T) when the value is null.
/// </summary>
// NOTE(review): if RenderParams holds reference-type elements, Find returns null
// on a miss and 'param.Value' throws NullReferenceException - confirm element type.
public T GetParam <T>(string ParamName)
{
    var param = RenderParams.Find(p => p.Name.Equals(ParamName, StringComparison.InvariantCultureIgnoreCase));
    if (param.Value == null)
    {
        return (default(T));
    }
    return ((T)param.Value);
}
/**
 * Render() method entry point for any SoundEvent.
 * returns the adapted canvas, upon which events may 'draw' their output. (e.g. modifiers)
 * Child classes will typically override Render() and have their own implementation.
 * Return true if effect was active and activated at this time, false if not active or activated at this time.
 */
public virtual bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    if (!_isActive)
    {
        return (false); // skip if not active
    }
    // Copy the parent's params, adapt them for this event, then delegate to children.
    _rp = new RenderParams(parentRp);
    AdaptRenderParams(_rp);
    return (RenderChildren(_rp, canvas));
}
/// <summary>
/// Renders the floor as a single textured quad. Only the single-texture path is
/// handled here; nothing is drawn when multiTexture is set.
/// </summary>
protected override void Render(RenderParams data)
{
    if (!multiTexture)
    {
        // Set the texture replace function
        data.RenderContext.SetShader(shaderType.Texture2DNoLights);
        data.RenderContext.SetTexture(floorTexture);
        // One quad: all four normals point along +Z, texture coordinates
        // tiled by (tile_S, tile_T).
        data.RenderContext.DrawRichPlainQuads(Vertices,
            new Vector3D[] { Vector3D.AxisZ, Vector3D.AxisZ, Vector3D.AxisZ, Vector3D.AxisZ },
            new PointF[] { new PointF(0, 0), new PointF(tile_S, 0), new PointF(tile_S, tile_T), new PointF(0, tile_T) });
    }
}
/// <summary>
/// Render this scene
/// </summary>
/// <remarks>
/// Clears the surface, draws every element in order, then flushes the surface
/// to screen with the given render parameters.
/// </remarks>
public override void Render(RenderParams rp)
{
    this._surface.Clear();
    this.DrawMap();
    this.DrawOverview();
    this.DrawWorldGenProgress();
    this.DrawTileInfo();
    this.DrawKeybindings();
    this._surface.Render(rp);
}
/// <summary>
/// Renders this entity. Wire entities are drawn from their dedicated wire
/// graphics data; every other nature uses the base rendering path.
/// </summary>
protected override void Render(RenderParams data)
{
    if (entityNature != entityNatureType.Wire)
    {
        base.Render(data);
        return;
    }
    data.RenderContext.Draw(wireGraphicsData);
}
/// <summary>
/// Renders a screen into a new bitmap sized to the screen's tile grid at the
/// current scale. The caller owns (and should dispose) the returned image.
/// </summary>
public static Image screenToImage(Screen[] screens, int scrNo, RenderParams renderParams)
{
    // Scaled pixel size of one big block.
    int scaledTileWidth = (int)(renderParams.bigBlocks[0].Width * renderParams.curScale);
    int scaledTileHeight = (int)(renderParams.bigBlocks[0].Height * renderParams.curScale);

    Image result = new Bitmap(renderParams.width * scaledTileWidth, renderParams.height * scaledTileHeight);
    using (var g = Graphics.FromImage(result))
    {
        render(g, screens, scrNo, renderParams);
    }
    return result;
}
/// <summary>
/// Renders this oscillator for one pass. When the oscillator is already playing
/// for this hierarchy id, its channel settings are updated (or it is stopped and
/// forgotten once past its duration / no longer playing). Otherwise playback is
/// started, unless the time is within 100 ms of the end.
/// </summary>
internal void RenderOsc(RenderParams rp, RenderCanvas canvas)
{
    //Util.Log("RenOsc HID=" + rp.HierarchyID + " T=" + Math.Round(rp.Time, 3) + " AbsT=" + Math.Round(rp.AbsTime, 3) + " A=" + Math.Round(rp.Ampl, 3) + "\n");
    FMOD.Channel channel = null;
    FMOD.RESULT r;
    // Single TryGetValue lookup instead of ContainsKey + indexer (double lookup).
    if (nowPlayingList.TryGetValue(rp.HierarchyID, out channel))
    {
        // check if still playing now
        bool isPlayingNow = false;
        r = channel.isPlaying(ref isPlayingNow);
        if (isPlayingNow)
        {
            // check if should be stopped now
            if (rp.Time >= Duration)
            {
                channel.stop();
                nowPlayingList.Remove(rp.HierarchyID);
            }
            else
            {
                // if still validly playing, adapt channel properties only.
                AdaptChannelSettings(channel, rp, canvas);
                //Util.Log("   rp.A=" + Math.Round(rp.Ampl, 3) + " canv.A=" + canvas.AmplMultiply + "\n");
            }
        }
        else
        {
            // if not anymore, remove from list
            nowPlayingList.Remove(rp.HierarchyID);
        }
    }
    else
    {
        // was not playing but should be rendered - hence, initiate playing now
        if (rp.Time < Duration - 0.100) // extra safety margin - do not start if close to end.
        {
            channel = PlayOsc(rp, canvas);
            if (channel != null)
            {
                // store playing sound in the table
                nowPlayingList[rp.HierarchyID] = channel;
            }
        }
    }
}
/// <summary>
/// Coroutine that renders a decoration icon: validates the decoration id against
/// the known definitions, then loads the decoration prefab for rendering.
/// Logs an error (and renders nothing) for unknown ids.
/// </summary>
protected override IEnumerator processRequest(RenderParams renderParam)
{
    DecorationRenderParams param = renderParam as DecorationRenderParams;
    DecorationDefinition decorationDefinition = null;
    if (!decorationList.TryGetValue(param.Definition.Id, out decorationDefinition))
    {
        Log.LogErrorFormatted(this, "Unable to locate decoration {0} in decoration definitions with id {1}.", param.Definition.Name, param.Definition.Id);
    }
    else
    {
        yield return (loadDecorationPrefab(param));
    }
}
/// <summary>
/// Coroutine that renders a structure icon: validates the structure id against
/// the known definitions, then loads the structure prefab for rendering.
/// Logs an error (and renders nothing) for unknown ids.
/// </summary>
protected override IEnumerator processRequest(RenderParams renderParam)
{
    StructureRenderParams param = renderParam as StructureRenderParams;
    StructureDefinition structureDefinition = null;
    if (!structureList.TryGetValue(param.Definition.Id, out structureDefinition))
    {
        Log.LogErrorFormatted(this, "Unable to locate structure {0} in structure definitions with id {1}.", param.Definition.Name, param.Definition.Id);
    }
    else
    {
        yield return (loadStructurePrefab(param));
    }
}
/// <summary>
/// Copy constructor: shallow-copies every render setting from another instance.
/// Reference members (bigBlocks, renderBlockFunc) are shared, not cloned.
/// </summary>
public RenderParams(RenderParams other)
{
    // Block data and grid geometry.
    bigBlocks = other.bigBlocks;
    visibleRect = other.visibleRect;
    width = other.width;
    height = other.height;
    leftMargin = other.leftMargin;
    topMargin = other.topMargin;
    // View options.
    curScale = other.curScale;
    showBorder = other.showBorder;
    showBlocksAxis = other.showBlocksAxis;
    additionalRenderEnabled = other.additionalRenderEnabled;
    // Per-block rendering callback.
    renderBlockFunc = other.renderBlockFunc;
}
/// <summary>
/// Renders the tree. When no tree is loaded, draws a red placeholder message at
/// a fixed position instead; otherwise renders recursively from the root.
/// </summary>
public void Render(RenderTarget target)
{
    if (tree == null)
    {
        // No tree: show a red notice and bail out.
        RenderParams textParams = new RenderParams();
        textParams.tintColor = Color.Red;
        textParams.position = new Vector2f(50f, 50f);
        target.DrawText(bodyFont.Get <Font>(), "Tree is null, nothing to display", 16, textParams);
        return;
    }
    // Layout cursor threaded by reference through the recursive node renderer.
    Vector2f refVector = Vector2f.Zero;
    RenderNode(target, tree.root, ref refVector);
}
/// <summary>
/// Renders this oscillator event: adapts render params, renders children onto a
/// private canvas, then drives the oscillator while within the event's duration
/// (extended by a small safety margin) and a DSP is attached.
/// </summary>
/// <returns>true when the oscillator was rendered at this time.</returns>
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    if (!Active)
        return false;
    RenderParams rp = new RenderParams(parentRp);
    AdaptRenderParams(rp);
    // Children paint onto a private canvas that feeds this oscillator.
    RenderCanvas myCanvas = new RenderCanvas();
    RenderChildren(rp, myCanvas);
    // render my audio or signal or ... if within the right time
    if (rp.Time >= 0 && (rp.Time < (Duration + RENDER_SAFETY_MARGIN_SEC)) && _dsp != null)
    {
        RenderOsc(rp, myCanvas);
        return true;
    }
    return false;
}
/// <summary>
/// Render main menu scene to screen
/// </summary>
/// <remarks>
/// Clears both surfaces, draws the title and menu (plus the world-selection
/// panel when active), then flushes title and main surfaces in that order.
/// </remarks>
public override void Render(RenderParams rp)
{
    this._titleSurface.Clear();
    this._mainSurface.Clear();
    this.DrawTitle();
    this.DrawMenu();
    if (this._selectWorld)
    {
        this.DrawWorldSelection();
    }
    this._titleSurface.Render(rp);
    this._mainSurface.Render(rp);
}
/// <summary>
/// Initializes the OpenGL render manager: opens the display, queries per-eye
/// render info, derives the combined render-target width/height, computes the
/// normalized side-by-side cropping viewports, and allocates one render buffer
/// per eye.
/// </summary>
/// <exception cref="InvalidOperationException">When the eye viewports do not share one height.</exception>
private void SetupRenderManager()
{
    _graphicsLibrary = new GraphicsLibraryOpenGL();
    _graphicsLibrary.Toolkit = new MonoGameGraphicsToolkit();
    _renderManager = new RenderManagerOpenGL(_context, "OpenGL", _graphicsLibrary);
    // Pump the context until the render manager reports it is usable.
    while (!_renderManager.DoingOkay)
    {
        _context.update();
    }
    _renderManager.OpenDisplay();
    _renderInfo = new RenderInfoOpenGL[2];
    _renderParams = new RenderParams();
    _renderManager.GetRenderInfo(_renderParams, ref _renderInfo);
    // Total width is the sum of all viewport widths; all must share one height.
    for (var i = 0; i < _renderInfo.Length; i++)
    {
        _width += (int)_renderInfo[i].Viewport.Width;
        if (_height != 0 && _height != (int)_renderInfo[i].Viewport.Height)
        {
            throw new InvalidOperationException("All RT must have the same height.");
        }
        _height = (int)_renderInfo[i].Viewport.Height;
    }
    // Each eye gets a horizontal slice of the shared render target (0..1 units).
    _normalizedCroppingViewports = new ViewportDescription[_renderInfo.Length];
    for (var i = 0; i < _normalizedCroppingViewports.Length; i++)
    {
        _normalizedCroppingViewports[i] = new ViewportDescription
        {
            Height = 1.0,
            Width = _renderInfo[i].Viewport.Width / _width,
            Left = (i * _renderInfo[i].Viewport.Width) / _width,
            Lower = 0
        };
    }
    // One render buffer per eye.
    _buffers = new RenderBufferOpenGL[2];
    for (var i = 0; i < _buffers.Length; i++)
    {
        _buffers[i] = new RenderBufferOpenGL();
    }
}
/// <summary>
/// Renders this modifier event. While within [0, Duration) and a signal is
/// attached, samples the signal at the adapted time and applies it per the
/// modifier type: as a DSP/oscillator parameter, or onto the canvas as an
/// amplitude multiply/add or a pan offset.
/// </summary>
/// <returns>true when the modifier was applied at this time.</returns>
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    if (!Active)
    {
        return (false); // skip if not active
    }
    RenderParams rp = new RenderParams(parentRp);
    AdaptRenderParams(rp);
    RenderChildren(rp, canvas);
    if (rp.Time >= 0 && rp.Time < Duration && _sig != null)
    {
        // Sample the modulation signal at the current (adapted) time.
        double s = _sig.Render(rp.Time);
        switch (_modif)
        {
        case Modifier.DSP_PARAM:
            // Forward the sample to whichever parent unit(s) are attached.
            if (_parentsDSP != null)
            {
                _parentsDSP.SetParameter(_paramNumber, s);
            }
            if (_parentsOsc != null)
            {
                _parentsOsc.SetParameter(_paramNumber, s);
            }
            break;
        case Modifier.NONE:
            break;
        case Modifier.AMPLITUDE:
            canvas.AmplMultiply *= s;
            break;
        case Modifier.AMPLITUDE_ADDITIVE:
            canvas.AmplMultiply += s;
            break;
        case Modifier.PAN:
            canvas.Pan += s;
            break;
        }
        return (true);
    }
    return (false);
} // end method
/**
 * adapt the RenderParams using specific fixed param fields defined in this event.
 * Such as pan, volume, Time (in case of repeats) and a new hierarchy-id.
 */
internal void AdaptRenderParams(RenderParams rp)
{
    // adapt rendering-params UID for rendering/children
    rp.HierarchyID = Util.HashValues(rp.HierarchyID, _id);
    // adapt render params with instructions from this event (also passed along to children via rp)
    rp.Pan += _pan;
    rp.Ampl *= _ampl;
    rp.Time *= _speed;
    // if repeats are active, 'wind back' time where needed so that event is played again and again
    // NOTE(review): the guard tests against Duration but the wind-back uses
    // _duration - confirm Duration accounts for repeats (e.g. _repeats * _duration),
    // otherwise the wind-back range may be wrong.
    if (_repeats > 1 && rp.Time < Duration)
    {
        double Nwindbacks = Math.Floor(rp.Time / _duration);
        rp.Time -= _duration * Nwindbacks;
    }
}
/// <summary>
/// Runs one pass of the post-processing material: feeds timing, throttle,
/// color-adjust, black/white-point and reference-texture uniforms into the
/// material, then blits prev into next through it.
/// </summary>
/// <param name="dt">Delta time forwarded to the shader (x component of the time vector).</param>
public void Render(RenderTexture next, Texture prev, Texture refTex, RenderParams rparams, float dt)
{
    mat.SetVector(P_User_Time, new Vector4(dt, Time.timeSinceLevelLoad, 0f, 0f));
    mat.SetVector(P_Throttle, new Vector4(rparams.light, rparams.dark));
    mat.SetVector(P_ColorAdjust, rparams.colorAdjuster);
    mat.SetVector(P_BWPoints, new Vector4(rparams.blackWhtePoints.x, rparams.blackWhtePoints.y, 0f, 1f));
    mat.SetTexture(P_RefTex, refTex);
    // Apply the material while copying prev -> next.
    Graphics.Blit(prev, next, mat);
}
/**
 * called internally from Render() method, by any event that needs/wants to render its child events.
 * Renders exactly one randomly chosen child; the selection is "sticky" - the same
 * child is re-rendered each pass until it reports inactive or its time window has
 * passed, after which the next pass picks a new child.
 */
internal override bool RenderChildren(RenderParams rp, RenderCanvas canvas)
{
    if (_children.Count() == 0)
    {
        return (false);
    }
    // check if a child for rendering is already selected: if not, select one
    if (_childSelected == null)
    {
        // select one child effect random or in sequence, and see if it has to be played now
        double myRpTime = rp.Time;
        // NOTE(review): new Random() per call uses a time-based default seed; rapid
        // successive calls can repeat picks - consider a shared Random instance.
        Random rnd = new Random();
        int idx = rnd.Next(_children.Count());
        KeyValuePair <double, List <SoundEvent> > kvPair = _children.ElementAt(idx);
        List <SoundEvent> evs = kvPair.Value;
        idx = rnd.Next(evs.Count());
        _childSelected = evs.ElementAt(idx);
    }
    SoundEvent ev = _childSelected;
    // check if we are in the time range where the effect can work
    double evEndTime = 0.0 + ev.Duration; ///_timeSpeedupFactor ;
    if (evEndTime + RENDER_SAFETY_MARGIN_SEC > rp.Time) // if end time lies in the future...
    {
        // --render the child effect, shifted in time/pan/amplitude by use of rp.
        RenderParams rpChild = new RenderParams(rp);
        rpChild.Time = (rp.Time - 0.0); // only time is set for each child separately. Rest is same.
        bool wasActive = ev.Render(rpChild, canvas);
        if (!wasActive)
        {
            _childSelected = null; // reset back - next time, another event may be selected into _childSelected
        }
        return (wasActive);
    }
    else
    {
        _childSelected = null; // reset back - next time, another event may be selected into _childSelected
    }
    return (false);
} // end method
/// <summary>
/// Replaces template tags in the builder with values from the parameters:
/// first plain tags (substituted verbatim), then HTML tags (HTML-encoded).
/// Each distinct tag value is replaced only once; tags with no parameter value
/// are left untouched. Returns the resulting string.
/// </summary>
private static string InternalRenderLite(StringBuilder sb, RenderParams parameters)
{
    // Tracks tag strings already substituted so each is processed once.
    var doneHashset = new HashSet <string>();
    if (parameters == null)
    {
        return (sb.ToString());
    }
    // Pass 1: plain tags, raw substitution.
    var pmatches = NormalTagRegex.Matches(sb.ToString());
    foreach (Match pmatch in pmatches)
    {
        if (doneHashset.Contains(pmatch.Value))
        {
            continue;
        }
        // Strip the tag delimiters to get the parameter name.
        var m = pmatch.Value.Trim('<', '>', '%', ' ');
        var t = parameters[m];
        if (t == null)
        {
            continue;
        }
        sb.Replace(pmatch.Value, t);
        doneHashset.Add(pmatch.Value);
    }
    // Pass 2: HTML tags, substituted with HTML encoding.
    pmatches = HtmlTagRegex.Matches(sb.ToString());
    foreach (Match pmatch in pmatches)
    {
        if (doneHashset.Contains(pmatch.Value))
        {
            continue;
        }
        var m = pmatch.Value.Trim('<', '>', '%', '=', ' ');
        var t = parameters[m];
        if (string.IsNullOrEmpty(t))
        {
            continue;
        }
        sb.Replace(pmatch.Value, HttpUtility.HtmlEncode(t));
        doneHashset.Add(pmatch.Value);
    }
    return (sb.ToString());
}
/**
 * called internally from Render() method, by any event that needs/wants to render its child events
 */
internal virtual bool RenderChildren(RenderParams rp, RenderCanvas canvas)
{
    if (_children.Count() == 0)
    {
        return (false);
    }
    // loop all child effects and see if they have to be played now
    bool wasActive = false;
    foreach (KeyValuePair <double, List <SoundEvent> > pair in _children)
    {
        double evStartTime = pair.Key;
        // check if effect lies in the future. In this case, we can break now. All further child effects
        // will be even later in time so we do not need to iterate these further
        // (assumes _children is ordered by start time - TODO confirm)
        if (evStartTime > rp.Time + rp.RenderAheadTime)
        {
            break;
        }
        // loop all events at that specific time 't'
        foreach (SoundEvent ev in pair.Value)
        {
            // check if we are in the time range where the effect can work
            double evEndTime = evStartTime + ev.Duration; ///_timeSpeedupFactor ;
            if (evEndTime + RENDER_SAFETY_MARGIN_SEC > rp.Time) // if end time lies in the future...
            {
                // --render the child effect, shifted in time/pan/amplitude by use of rp.
                RenderParams rpChild = new RenderParams(rp);
                rpChild.Time = (rp.Time - evStartTime); // only time is set for each child separately. Rest is same.
                bool wasChildActive = ev.Render(rpChild, canvas);
                if (wasChildActive)
                {
                    wasActive = true;
                }
            }
        }
    } // foreach over children
    return (wasActive);
} // end method
/**
 * called internally from Render() method, by any event that needs/wants to render its child events.
 * Renders one randomly chosen child; the choice is "sticky" until the child
 * reports inactive or its time window has passed, then the next pass re-picks.
 */
internal override bool RenderChildren(RenderParams rp, RenderCanvas canvas)
{
    if (_children.Count() == 0)
        return false;

    // check if a child for rendering is already selected: if not, select one
    if (_childSelected == null)
    {
        // select one child effect random or in sequence, and see if it has to be played now
        double myRpTime = rp.Time;
        // NOTE(review): new Random() per call is time-seeded; rapid calls may repeat picks.
        Random rnd = new Random();
        int idx = rnd.Next(_children.Count());
        KeyValuePair<double, List<SoundEvent>> kvPair = _children.ElementAt(idx);
        List<SoundEvent> evs = kvPair.Value;
        idx = rnd.Next(evs.Count());
        _childSelected = evs.ElementAt(idx);
    }
    SoundEvent ev = _childSelected;

    // check if we are in the time range where the effect can work
    double evEndTime = 0.0 + ev.Duration; ///_timeSpeedupFactor ;
    if (evEndTime + RENDER_SAFETY_MARGIN_SEC > rp.Time) // if end time lies in the future...
    {
        // --render the child effect, shifted in time/pan/amplitude by use of rp.
        RenderParams rpChild = new RenderParams(rp);
        rpChild.Time = (rp.Time - 0.0); // only time is set for each child separately. Rest is same.
        bool wasActive = ev.Render(rpChild, canvas);
        if (!wasActive)
        {
            _childSelected = null; // reset back - next time, another event may be selected into _childSelected
        }
        return wasActive;
    }
    else
    {
        _childSelected = null; // reset back - next time, another event may be selected into _childSelected
    }
    return false;
}
/// <summary>
/// Renders this DSP effect event: adapts render params, renders children, then
/// un-bypasses the DSP only while the adapted time is inside [0, Duration).
/// </summary>
/// <returns>true when the effect is active at this time.</returns>
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    if (!Active)
        return false;
    RenderParams rp = new RenderParams(parentRp);
    AdaptRenderParams(rp);
    RenderChildren(rp, canvas);
    // enable dsp effect only when in right time range
    if (_dsp != null)
    {
        if (rp.Time >= 0 && rp.Time < Duration)
        {
            _dsp.setBypass(false);
            return true;
        }
        else
        {
            _dsp.setBypass(true);
        }
    }
    return false;
}
/// <summary>
/// Renders this modifier event: while inside [0, Duration) with a signal
/// attached, samples the signal and applies it per the modifier type (DSP/osc
/// parameter, amplitude multiply/add, or pan offset on the canvas).
/// </summary>
/// <returns>true when the modifier was applied at this time.</returns>
public override bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    if (!Active)
        return false; // skip if not active
    RenderParams rp = new RenderParams(parentRp);
    AdaptRenderParams(rp);
    RenderChildren(rp, canvas);
    if (rp.Time >= 0 && rp.Time < Duration && _sig != null)
    {
        // Sample the modulation signal at the adapted time.
        double s = _sig.Render(rp.Time);
        switch (_modif)
        {
        case Modifier.DSP_PARAM:
            // Forward the sample to whichever parent unit(s) are attached.
            if (_parentsDSP != null)
            {
                _parentsDSP.SetParameter(_paramNumber, s);
            }
            if (_parentsOsc != null)
            {
                _parentsOsc.SetParameter(_paramNumber, s);
            }
            break;
        case Modifier.NONE:
            break;
        case Modifier.AMPLITUDE:
            canvas.AmplMultiply *= s;
            break;
        case Modifier.AMPLITUDE_ADDITIVE:
            canvas.AmplMultiply += s;
            break;
        case Modifier.PAN:
            canvas.Pan += s;
            break;
        }
        return true;
    }
    return false;
}
/// <summary>
/// Deserializes this structure from the bit buffer in wire order: two 32-bit
/// ints, a nested RenderParams, an int, sixteen TextureStageParams, an int,
/// three char arrays (256/64/64), then the tag-map serialize data and tag map.
/// </summary>
public void Parse(GameBitBuffer buffer)
{
    Field0 = buffer.ReadInt(32);
    Field1 = buffer.ReadInt(32);
    Field2 = new RenderParams();
    Field2.Parse(buffer);
    Field3 = buffer.ReadInt(32);
    // NOTE(review): assigns through Field4 but fills through _Field4 - presumably
    // Field4 is a property backed by _Field4; confirm, otherwise the elements
    // written below are lost.
    Field4 = new TextureStageParams[16];
    for (int i = 0; i < _Field4.Length; i++)
    {
        _Field4[i] = new TextureStageParams();
        _Field4[i].Parse(buffer);
    }
    Field5 = buffer.ReadInt(32);
    Field6 = buffer.ReadCharArray(256);
    Field7 = buffer.ReadCharArray(64);
    Field8 = buffer.ReadCharArray(64);
    serTagMap = new SerializeData();
    serTagMap.Parse(buffer);
    hTagMap = new DT_TAGMAP();
    hTagMap.Parse(buffer);
}
/// <summary>
/// Draws this item's textual representation (non-verbose form) via the shared
/// Traceback renderer, forwarding the render params by reference, and returns
/// whatever Traceback.Render reports.
/// </summary>
public float Render(Graphics g, ref RenderParams rp)
{
    return Traceback.Render(g, ref rp, ToString(false));
}
/**
 * Render() method entry point for any SoundEvent.
 * returns the adapted canvas, upon which events may 'draw' their output. (e.g. modifiers)
 * Child classes will typically override Render() and have their own implementation.
 * Return true if effect was active and activated at this time, false if not active or activated at this time.
 */
public virtual bool Render(RenderParams parentRp, RenderCanvas canvas)
{
    if (!_isActive)
        return false; // skip if not active
    // Copy the parent's params, adapt them for this event, then delegate to children.
    _rp = new RenderParams(parentRp);
    AdaptRenderParams(_rp);
    return RenderChildren(_rp, canvas);
}
/**
 * called internally from Render() method, by any event that needs/wants to render its child events
 */
internal virtual bool RenderChildren(RenderParams rp, RenderCanvas canvas)
{
    if (_children.Count() == 0)
        return false;

    // loop all child effects and see if they have to be played now
    bool wasActive = false;
    foreach (KeyValuePair<double, List<SoundEvent>> pair in _children)
    {
        double evStartTime = pair.Key;
        // check if effect lies in the future. In this case, we can break now. All further child effects
        // will be even later in time so we do not need to iterate these further
        // (assumes _children is ordered by start time - TODO confirm)
        if (evStartTime > rp.Time + rp.RenderAheadTime)
            break;
        // loop all events at that specific time 't'
        foreach (SoundEvent ev in pair.Value)
        {
            // check if we are in the time range where the effect can work
            double evEndTime = evStartTime + ev.Duration; ///_timeSpeedupFactor ;
            if (evEndTime + RENDER_SAFETY_MARGIN_SEC > rp.Time) // if end time lies in the future...
            {
                // --render the child effect, shifted in time/pan/amplitude by use of rp.
                RenderParams rpChild = new RenderParams(rp);
                rpChild.Time = (rp.Time - evStartTime); // only time is set for each child separately. Rest is same.
                bool wasChildActive = ev.Render(rpChild, canvas);
                if (wasChildActive)
                    wasActive = true;
            }
        }
    } // foreach over children
    return wasActive;
}
/// <summary>
/// Final g-buffer combine pass: binds the height/shade/indirect/sky/depth/misc
/// textures to units 3-9, then renders through the combiner while pushing every
/// atmosphere, lighting and terrain uniform from the render params.
/// </summary>
public void Render(RenderParams rp)
{
    // Texture unit numbers here must match the sampler uniforms set below.
    rp.HeightTexture.Bind(TextureUnit.Texture3);
    rp.ShadeTexture.Bind(TextureUnit.Texture4);
    //rp.CloudTexture.Bind(TextureUnit.Texture4);
    //rp.CloudDepthTexture.Bind(TextureUnit.Texture5);
    rp.IndirectIlluminationTexture.Bind(TextureUnit.Texture5);
    rp.SkyCubeTexture.Bind(TextureUnit.Texture6);
    rp.DepthTexture.Bind(TextureUnit.Texture7);
    rp.MiscTexture.Bind(TextureUnit.Texture8);
    rp.MiscTexture2.Bind(TextureUnit.Texture9);
    this.gbufferCombiner.Render(projection, modelview, (sp) =>
    {
        sp.SetUniform("pre_projection_matrix", rp.GBufferProjectionMatrix);
        sp.SetUniform("eyePos", rp.EyePos);
        sp.SetUniform("sunVector", rp.SunDirection);
        // Sampler bindings; units 0-2 are not bound in this method - presumably
        // bound by the combiner or an earlier pass (TODO confirm).
        sp.SetUniform("paramTex", 0);
        sp.SetUniform("normalTex", 1);
        sp.SetUniform("normalLargeScaleTex", 2);
        sp.SetUniform("heightTex", 3);
        sp.SetUniform("shadeTex", 4);
        sp.SetUniform("indirectTex", 5);
        sp.SetUniform("skyCubeTex", 6);
        sp.SetUniform("depthTex", 7);
        sp.SetUniform("miscTex", 8);
        sp.SetUniform("miscTex2", 9);
        // Terrain, atmosphere and lighting parameters.
        sp.SetUniform("minHeight", rp.MinHeight);
        sp.SetUniform("maxHeight", rp.MaxHeight);
        sp.SetUniform("exposure", rp.Exposure);
        sp.SetUniform("Kr", rp.Kr);
        sp.SetUniform("sunLight", rp.SunLight);
        sp.SetUniform("scatterAbsorb", rp.ScatterAbsorb);
        sp.SetUniform("mieBrightness", rp.MieBrightness);
        sp.SetUniform("miePhase", rp.MiePhase);
        sp.SetUniform("raleighBrightness", rp.RaleighBrightness);
        sp.SetUniform("skylightBrightness", rp.SkylightBrightness);
        sp.SetUniform("groundLevel", rp.GroundLevel);
        sp.SetUniform("sampleDistanceFactor", rp.SampleDistanceFactor);
        sp.SetUniform("nearScatterDistance", rp.NearScatterDistance);
        sp.SetUniform("nearMieBrightness", rp.NearMieBrightness);
        sp.SetUniform("aoInfluenceHeight", rp.AOInfluenceHeight);
        sp.SetUniform("ambientBias", rp.AmbientBias);
        sp.SetUniform("indirectBias", rp.IndirectBias);
        sp.SetUniform("renderMode", rp.RenderMode);
        sp.SetUniform("snowSlopeDepthAdjust", rp.SnowSlopeDepthAdjust);
        sp.SetUniform("normalBlendNearDistance", rp.NormalBlendNearDistance);
        sp.SetUniform("normalBlendFarDistance", rp.NormalBlendFarDistance);
        sp.SetUniform("scatteringInitialStepSize", rp.ScatteringInitialStepSize);
        sp.SetUniform("scatteringStepGrowthFactor", rp.ScatteringStepGrowthFactor);
        sp.SetUniform("time", rp.Time);
        sp.SetUniform("boxparam", new Vector4((float)rp.TileWidth, (float)rp.TileHeight, 0.0f, 1.0f));
    });
}
/*
 * Starts this oscillator's DSP on a free FMOD channel in paused state, applies
 * volume/pan from the render params and canvas, chains any extra DSP units,
 * then unpauses. Returns the playing channel, or null on failure.
 */
FMOD.Channel PlayOsc(RenderParams rp, RenderCanvas canvas)
{
    FMOD.Channel channel = null;
    // Start paused (third arg true) so settings can be applied before sound is heard.
    FMOD.RESULT r = MusicEngine.AudioEngine.playDSP(FMOD.CHANNELINDEX.FREE, _dsp, true, ref channel);
    if (r == FMOD.RESULT.OK && channel != null)
    {
        // set regular play properties
        AdaptChannelSettings(channel, rp, canvas);
        // set optional DSP unit(s) on channel
        if (r == FMOD.RESULT.OK)
        {
            FMOD.DSPConnection conn = null;
            foreach (FMOD.DSP d in _dspList)
            {
                r = channel.addDSP(d, ref conn); // TODO errcheck
            }
            // go - start playing
            if (r == FMOD.RESULT.OK)
            {
                r = channel.setPaused(false);
            }
        }
    } // if
    return channel;
}