Example #1
 public RunCommand(RenderParams renderParams = RenderParams.AsSlash)
 {
     _params = new ParameterDictionary();
     Arguments = string.Empty;
     _renderParams = renderParams;
     Out = Console.Out;
     Error = Console.Error;
 }
        public override bool Render(RenderParams parentRp, RenderCanvas canvas)
        {
            if (!Active) return false;
            RenderParams rp = new RenderParams(parentRp);
            AdaptRenderParams(rp);
            RenderCanvas myCanvas = new RenderCanvas();
            RenderChildren(rp, myCanvas);

            // render this event's own audio or signal if we are within the right time window
            if (rp.Time >= 0 && (rp.Time < (Duration + RENDER_SAFETY_MARGIN_SEC)) && _dsp != null)
            {
                RenderOsc(rp, myCanvas);
                return true;
            }
            return false;
        }
        /**
         * Called internally from the Render() method by any event that needs to render its child events.
         */
        internal override bool RenderChildren(RenderParams rp, RenderCanvas canvas)
        {
            if (_children.Count() == 0)
                return false;

            // check if a child for rendering is already selected: if not, select one
            if (_childSelected == null)
            {
                // select one child effect at random and see if it has to be played now
                Random rnd = new Random();
                int idx = rnd.Next(_children.Count());
                KeyValuePair<double, List<SoundEvent>> kvPair = _children.ElementAt(idx);
                List<SoundEvent> evs = kvPair.Value;
                idx = rnd.Next(evs.Count());
                _childSelected = evs.ElementAt(idx);
            }

            SoundEvent ev = _childSelected;

            // check if we are in the time range where the effect can work
            double evEndTime = 0.0 + ev.Duration; ///_timeSpeedupFactor ;
            if (evEndTime + RENDER_SAFETY_MARGIN_SEC > rp.Time)  // if end time lies in the future...
            {
                // --render the child effect, shifted in time/pan/amplitude by use of rp.
                RenderParams rpChild = new RenderParams(rp);
                rpChild.Time = (rp.Time - 0.0);  // only time is set for each child separately. Rest is same.
                bool wasActive = ev.Render(rpChild, canvas);
                if (!wasActive)
                {
                    _childSelected = null; // reset back - next time, another event may be selected into _childSelected
                }
                return wasActive;
            }
            else
            {
                _childSelected = null; // reset back - next time, another event may be selected into _childSelected
            }
            return false;
        }
        public override bool Render(RenderParams parentRp, RenderCanvas canvas)
        {
            if (!Active) return false; // skip if not active
            RenderParams rp = new RenderParams(parentRp);
            AdaptRenderParams(rp);
            RenderChildren(rp, canvas);

            if (rp.Time >= 0 && rp.Time < Duration && _sig != null)
            {
                double s = _sig.Render(rp.Time);
                switch (_modif)
                {
                    case Modifier.DSP_PARAM:
                        if (_parentsDSP != null)
                        {
                            _parentsDSP.SetParameter(_paramNumber, s);
                        }
                        if (_parentsOsc != null)
                        {
                            _parentsOsc.SetParameter(_paramNumber, s);
                        }

                        break;
                    case Modifier.NONE:
                        break;
                    case Modifier.AMPLITUDE:
                        canvas.AmplMultiply *= s;
                        break;
                    case Modifier.AMPLITUDE_ADDITIVE:
                        canvas.AmplMultiply += s;
                        break;
                    case Modifier.PAN:
                        canvas.Pan += s;
                        break;
                }
                return true;
            }
            return false;
        }
        public override bool Render(RenderParams parentRp, RenderCanvas canvas)
        {
            if (!Active) return false;
            RenderParams rp = new RenderParams(parentRp);
            AdaptRenderParams(rp);
            RenderChildren(rp, canvas);

            // enable dsp effect only when in right time range
            if (_dsp != null)
            {
                if (rp.Time >= 0 && rp.Time < Duration)
                {
                    _dsp.setBypass(false);
                    return true;
                }
                else
                {
                    _dsp.setBypass(true);
                }
            }
            return false;
        }
 /**
  * Adapt a running sample's channel (volume and pan) according to the RenderParams and the canvas result.
  */
 internal void AdaptChannelSettings(FMOD.Channel channel, RenderParams rp, RenderCanvas canvas)
 {
     double a = rp.Ampl * canvas.AmplMultiply + canvas.AmplAdd;
     double p = rp.Pan + canvas.Pan;
     channel.setVolume((float)a);
     channel.setPan((float)p);
 }
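
The combined volume and pan are handed to FMOD unclamped here; FMOD expects a channel volume of roughly 0..1 and a pan of -1..1, so large sums can saturate. Below is a minimal standalone sketch of the same arithmetic with explicit clamping added; the helper name and the clamping policy are assumptions, not part of the original code.

using System;

static class ChannelMath
{
    // Mirrors AdaptChannelSettings: multiplicative gain plus additive offset for volume,
    // additive accumulation for pan, then clamped to FMOD-friendly ranges (an added assumption).
    public static (float Volume, float Pan) Combine(
        double ampl, double amplMultiply, double amplAdd, double pan, double canvasPan)
    {
        double a = ampl * amplMultiply + amplAdd;
        double p = pan + canvasPan;
        return ((float)Math.Clamp(a, 0.0, 1.0), (float)Math.Clamp(p, -1.0, 1.0));
    }
}

Whether clamping is actually desirable depends on how the canvas values are produced upstream; the original code leaves any range handling to FMOD itself.
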
        FMOD.Channel PlayOsc(RenderParams rp, RenderCanvas canvas)
        {
            FMOD.Channel channel = null;
            FMOD.RESULT r = MusicEngine.AudioEngine.playDSP(FMOD.CHANNELINDEX.FREE, _dsp, true, ref channel);
            if (r == FMOD.RESULT.OK && channel != null)
            {
                // set regular play properties
                AdaptChannelSettings(channel, rp, canvas);

                // set optional DSP unit(s) on channel
                if (r == FMOD.RESULT.OK)
                {
                    FMOD.DSPConnection conn = null;
                    foreach (FMOD.DSP d in _dspList)
                    {
                        r = channel.addDSP(d, ref conn); // TODO errcheck
                    }

                    // go - start playing
                    if (r == FMOD.RESULT.OK)
                    {
                        r = channel.setPaused(false);
                    }

                }
            } // if

            return channel;
        }
        internal void RenderOsc(RenderParams rp, RenderCanvas canvas)
        {
            //Util.Log("RenOsc HID=" + rp.HierarchyID + " T=" + Math.Round(rp.Time, 3) + " AbsT=" + Math.Round(rp.AbsTime, 3) + " A=" + Math.Round(rp.Ampl, 3) + "\n");
            bool wasPlaying = nowPlayingList.ContainsKey(rp.HierarchyID);
            FMOD.Channel channel = null;
            FMOD.RESULT r;

            if (wasPlaying)
            {
                channel = nowPlayingList[rp.HierarchyID];
                // check if still playing now
                bool isPlayingNow = false;
                r = channel.isPlaying(ref isPlayingNow);
                if (isPlayingNow)
                {
                    // check if should be stopped now
                    if (rp.Time >= Duration)
                    {
                        channel.stop();
                        nowPlayingList.Remove(rp.HierarchyID);
                    }
                    else
                    {
                        // if still validly playing, adapt channel properties only.
                        AdaptChannelSettings(channel, rp, canvas);
                        //Util.Log("     rp.A=" + Math.Round(rp.Ampl, 3) + " canv.A=" + canvas.AmplMultiply + "\n");
                    }
                }
                else
                {   // if not anymore, remove from list
                    nowPlayingList.Remove(rp.HierarchyID);
                }
            }
            else
            {   // was not playing but should be rendered - hence, initiate playing now
                if (rp.Time < Duration - 0.100) // extra safety margin - do not start if close to end.
                {

                    channel = PlayOsc(rp, canvas);
                    if (channel != null)
                    {
                        // store playing sound in the table
                        nowPlayingList[rp.HierarchyID] = channel;
                    }
                    else
                    {
                        // playback could not be started; nothing is tracked, so a later call may retry
                    }

                }
            }
        }
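
RenderOsc keeps at most one playing channel per hierarchy path by keying nowPlayingList on rp.HierarchyID: a channel that is still playing is either adapted or stopped, and a missing one is started unless the event is about to end. A compact, self-contained sketch of that bookkeeping pattern with stand-in types follows; none of the names below belong to the original project or to FMOD.

using System;
using System.Collections.Generic;

class VoiceTracker
{
    // One tracked voice per hierarchy id, mirroring nowPlayingList in RenderOsc.
    private readonly Dictionary<int, object> _playing = new Dictionary<int, object>();

    public void Update(int hierarchyId, double time, double duration, Func<object> startVoice)
    {
        if (_playing.ContainsKey(hierarchyId))
        {
            if (time >= duration)
                _playing.Remove(hierarchyId);      // past the end: stop tracking this voice
            // otherwise: adapt volume/pan on the running voice here
        }
        else if (time < duration - 0.100)          // safety margin: do not start close to the end
        {
            object voice = startVoice();
            if (voice != null)
                _playing[hierarchyId] = voice;     // remember the new voice for later calls
        }
    }
}
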
        public void Render(RenderParams rp)
        {
            rp.HeightTexture.Bind(TextureUnit.Texture3);
            rp.ShadeTexture.Bind(TextureUnit.Texture4);
            //rp.CloudTexture.Bind(TextureUnit.Texture4);
            //rp.CloudDepthTexture.Bind(TextureUnit.Texture5);
            rp.IndirectIlluminationTexture.Bind(TextureUnit.Texture5);
            rp.SkyCubeTexture.Bind(TextureUnit.Texture6);
            rp.DepthTexture.Bind(TextureUnit.Texture7);
            rp.MiscTexture.Bind(TextureUnit.Texture8);
            rp.MiscTexture2.Bind(TextureUnit.Texture9);

            this.gbufferCombiner.Render(projection, modelview, (sp) =>
            {
                sp.SetUniform("pre_projection_matrix", rp.GBufferProjectionMatrix);
                sp.SetUniform("eyePos", rp.EyePos);
                sp.SetUniform("sunVector", rp.SunDirection);
                sp.SetUniform("paramTex", 0);
                sp.SetUniform("normalTex", 1);
                sp.SetUniform("normalLargeScaleTex", 2);
                sp.SetUniform("heightTex", 3);
                sp.SetUniform("shadeTex", 4);
                sp.SetUniform("indirectTex", 5);
                sp.SetUniform("skyCubeTex", 6);
                sp.SetUniform("depthTex", 7);
                sp.SetUniform("miscTex", 8);
                sp.SetUniform("miscTex2", 9);
                sp.SetUniform("minHeight", rp.MinHeight);
                sp.SetUniform("maxHeight", rp.MaxHeight);
                sp.SetUniform("exposure", rp.Exposure);
                sp.SetUniform("Kr", rp.Kr);
                sp.SetUniform("sunLight", rp.SunLight);
                sp.SetUniform("scatterAbsorb", rp.ScatterAbsorb);
                sp.SetUniform("mieBrightness", rp.MieBrightness);
                sp.SetUniform("miePhase", rp.MiePhase);
                sp.SetUniform("raleighBrightness", rp.RaleighBrightness);
                sp.SetUniform("skylightBrightness", rp.SkylightBrightness);
                sp.SetUniform("groundLevel", rp.GroundLevel);
                sp.SetUniform("sampleDistanceFactor", rp.SampleDistanceFactor);
                sp.SetUniform("nearScatterDistance", rp.NearScatterDistance);
                sp.SetUniform("nearMieBrightness", rp.NearMieBrightness);
                sp.SetUniform("aoInfluenceHeight", rp.AOInfluenceHeight);
                sp.SetUniform("ambientBias", rp.AmbientBias);
                sp.SetUniform("indirectBias", rp.IndirectBias);
                sp.SetUniform("renderMode", rp.RenderMode);
                sp.SetUniform("snowSlopeDepthAdjust", rp.SnowSlopeDepthAdjust);

                sp.SetUniform("normalBlendNearDistance", rp.NormalBlendNearDistance);
                sp.SetUniform("normalBlendFarDistance", rp.NormalBlendFarDistance);

                sp.SetUniform("scatteringInitialStepSize", rp.ScatteringInitialStepSize);
                sp.SetUniform("scatteringStepGrowthFactor", rp.ScatteringStepGrowthFactor);

                sp.SetUniform("time", rp.Time);

                sp.SetUniform("boxparam", new Vector4((float)rp.TileWidth, (float)rp.TileHeight, 0.0f, 1.0f));
            });
        }
        /**
         * Called internally from the Render() method by any event that needs to render its child events.
         */
        internal virtual bool RenderChildren(RenderParams rp, RenderCanvas canvas)
        {
            if (_children.Count() == 0)
                return false;

            // loop all child effects and see if they have to be played now
            bool wasActive = false;
            foreach (KeyValuePair<double, List<SoundEvent>> pair in _children)
            {
                double evStartTime = pair.Key;

                // check if the effect starts in the future; if so, we can break now, since all further
                // child effects start even later and need not be iterated
                if (evStartTime > rp.Time + rp.RenderAheadTime) break;

                // loop all events at that specific time 't'
                foreach (SoundEvent ev in pair.Value)
                {
                    // check if we are in the time range where the effect can work
                    double evEndTime = evStartTime + ev.Duration; ///_timeSpeedupFactor ;
                    if (evEndTime + RENDER_SAFETY_MARGIN_SEC > rp.Time)  // if end time lies in the future...
                    {
                        // --render the child effect, shifted in time/pan/amplitude by use of rp.
                        RenderParams rpChild = new RenderParams(rp);
                        rpChild.Time = (rp.Time - evStartTime);  // only time is set for each child separately. Rest is same.
                        bool wasChildActive = ev.Render(rpChild, canvas);
                        if (wasChildActive)
                            wasActive = true;
                    }
                }
            }// foreach over children
            return wasActive;
        }
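
The early break in the loop above is only valid if _children is enumerated in ascending start-time order; a SortedDictionary keyed by start time would give exactly that behaviour, but the actual container is not shown in this excerpt, so treat it as an assumption. A small standalone sketch of the pattern:

using System.Collections.Generic;

class Schedule
{
    // Child names keyed by start time; SortedDictionary enumerates keys in ascending order.
    private readonly SortedDictionary<double, List<string>> _children =
        new SortedDictionary<double, List<string>>();

    public void Add(double startTime, string name)
    {
        if (!_children.TryGetValue(startTime, out List<string> list))
            _children[startTime] = list = new List<string>();
        list.Add(name);
    }

    // Yields every child whose start time falls within the render-ahead window.
    public IEnumerable<string> Due(double time, double renderAheadTime)
    {
        foreach (KeyValuePair<double, List<string>> pair in _children)
        {
            if (pair.Key > time + renderAheadTime)
                yield break;                       // all remaining keys start even later
            foreach (string name in pair.Value)
                yield return name;
        }
    }
}
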
        /**
         * Adapt the RenderParams using the fixed parameter fields defined in this event,
         * such as pan, amplitude, Time (in case of repeats) and a new hierarchy id.
         */
        internal void AdaptRenderParams(RenderParams rp)
        {
            // adapt rendering-params UID for rendering/children
            rp.HierarchyID = Util.HashValues(rp.HierarchyID, _id);

            // adapt render params with instructions from this event (also passed along to children via rp)
            rp.Pan += _pan;
            rp.Ampl *= _ampl;
            rp.Time *= _speed;

            // if repeats are active, 'wind back' time where needed so that event is played again and again
            if (_repeats > 1 && rp.Time < Duration)
            {
                double Nwindbacks = Math.Floor(rp.Time / _duration);
                rp.Time -= _duration * Nwindbacks;
            }
        }
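
The wind-back step above effectively reduces rp.Time modulo _duration so that every repeat sees a local time in [0, _duration). For example, with _duration = 2.0 s and rp.Time = 5.3 s, Nwindbacks = floor(5.3 / 2.0) = 2 and the child renders at 1.3 s into its third repeat. A standalone sketch of just that arithmetic (the class and method names are hypothetical):

using System;

static class RepeatTime
{
    // Maps a local event time into the [0, duration) window of the current repeat,
    // mirroring the wind-back step in AdaptRenderParams.
    public static double WindBack(double time, double duration)
    {
        double nWindbacks = Math.Floor(time / duration);
        return time - duration * nWindbacks;       // equivalent to time % duration for time >= 0
    }
}

// RepeatTime.WindBack(5.3, 2.0) yields approximately 1.3 (subject to floating-point rounding).
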
 /**
  * Render() method entry point for any SoundEvent.
  * Events may 'draw' their output (e.g. modifiers) onto the passed-in canvas.
  * Child classes will typically override Render() with their own implementation.
  * Returns true if the effect was active at this time, false if not.
  */
 public virtual bool Render(RenderParams parentRp, RenderCanvas canvas)
 {
     if (!_isActive) return false; // skip if not active
     _rp = new RenderParams(parentRp);
     AdaptRenderParams(_rp);
     return RenderChildren(_rp, canvas);
 }
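
Taken together, the base Render() acts as a template method: clone the parent's RenderParams, let AdaptRenderParams apply this event's pan/amplitude/speed and hierarchy-id adjustments, then recurse into RenderChildren; subclasses layer audio playback or modifier behaviour on top. A self-contained sketch of that recursion with stand-in types; none of these names belong to the original project.

using System.Collections.Generic;

class Params
{
    public double Time;
    public double Ampl = 1.0;
    public Params() { }
    public Params(Params other) { Time = other.Time; Ampl = other.Ampl; }
}

class Node
{
    public double AmplFactor = 1.0;
    public readonly List<Node> Children = new List<Node>();

    public virtual bool Render(Params parent)
    {
        var p = new Params(parent);          // clone so siblings do not see each other's changes
        p.Ampl *= AmplFactor;                // the AdaptRenderParams step
        bool anyActive = false;
        foreach (Node child in Children)
            anyActive |= child.Render(p);    // the RenderChildren step
        return anyActive;
    }
}
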
 public void Parse(GameBitBuffer buffer)
 {
     Field0 = buffer.ReadInt(32);
     Field1 = buffer.ReadInt(32);
     Field2 = new RenderParams();
     Field2.Parse(buffer);
     Field3 = buffer.ReadInt(32);
     Field4 = new TextureStageParams[16];
     for (int i = 0; i < Field4.Length; i++)
     {
         Field4[i] = new TextureStageParams();
         Field4[i].Parse(buffer);
     }
     Field5 = buffer.ReadInt(32);
     Field6 = buffer.ReadCharArray(256);
     Field7 = buffer.ReadCharArray(64);
     Field8 = buffer.ReadCharArray(64);
     serTagMap = new SerializeData();
     serTagMap.Parse(buffer);
     hTagMap = new DT_TAGMAP();
     hTagMap.Parse(buffer);
 }
Example #14
 public float Render(Graphics g, ref RenderParams rp)
 {
     return Traceback.Render(g, ref rp, ToString(false));
 }