public ObservableModel(IVisualArrayGenerator visualArrayGenerator, IBeliefState beliefState,
     IActivation activation)
 {
     _visualArrayGenerator = visualArrayGenerator;
     _beliefState = beliefState;
     _activation = activation;
 }
Example #2
 public override void addActivation(IActivation act)
 {
     if (act is LinkedActivation)
     {
         LinkedActivation newact = (LinkedActivation) act;
         if (lazy)
         {
             if (count == 0)
             {
                 first = newact;
                 last = newact;
             }
             else
             {
                 last.Next = newact;
                 last = newact;
             }
             count++;
         }
         else
         {
             if (count > 0)
             {
                 quickSort(newact);
             }
             else if (count == 0)
             {
                 first = newact;
                 last = newact;
             }
             count++;
         }
     }
 }
Example #3
 /// <summary>
 /// The method first compares the salience. If the salience is equal,
 /// we then compare the aggregate time.
 /// </summary>
 /// <param name="left"></param>
 /// <param name="right"></param>
 /// <returns></returns>
 public virtual int compare(IActivation left, IActivation right)
 {
     if (left.Rule.Salience == right.Rule.Salience)
     {
        // Since Sumatra does not propagate based on natural order, we
         // don't use the Activation timestamp. Instead, we use the
         // aggregate time.
         if (left.AggregateTime == right.AggregateTime)
         {
             return 0;
         }
         else
         {
             if (left.AggregateTime > right.AggregateTime)
             {
                return -1;
             }
             else
             {
                 return 1;
             }
         }
     }
     else
     {
         if (left.Rule.Salience > right.Rule.Salience)
         {
             return 1;
         }
         else
         {
            return -1;
         }
     }
 }
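As a usage illustration only: a minimal, self-contained sketch (RuleStub, ActivationStub, and AgendaSortDemo are stand-ins invented here, not the real Sumatra types) showing how a salience-then-aggregate-time comparison like the one above can order an agenda list.

    using System;
    using System.Collections.Generic;

    // Simplified stand-ins; the real IActivation/IRule interfaces carry many more members.
    class RuleStub { public int Salience; }
    class ActivationStub { public RuleStub Rule; public long AggregateTime; }

    static class AgendaSortDemo
    {
        // Mirrors the compare method above: higher salience compares as 1; with equal
        // salience, the larger aggregate time compares as -1.
        static int Compare(ActivationStub left, ActivationStub right)
        {
            if (left.Rule.Salience != right.Rule.Salience)
                return left.Rule.Salience > right.Rule.Salience ? 1 : -1;
            if (left.AggregateTime == right.AggregateTime) return 0;
            return left.AggregateTime > right.AggregateTime ? -1 : 1;
        }

        static void Main()
        {
            var agenda = new List<ActivationStub>
            {
                new ActivationStub { Rule = new RuleStub { Salience = 10 }, AggregateTime = 5 },
                new ActivationStub { Rule = new RuleStub { Salience = 20 }, AggregateTime = 1 },
            };
            // Sort descending on the comparison so the highest-ranked activation comes first.
            agenda.Sort((a, b) => Compare(b, a));
            Console.WriteLine(agenda[0].Rule.Salience); // prints 20
        }
    }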
Example #4
 /// <summary> The method first compares the salience. If the salience is equal,
 /// we then compare the aggregate time.
 /// </summary>
 /// <param name="left"></param>
 /// <param name="right"></param>
 /// <returns></returns>
 public virtual int compare(IActivation left, IActivation right)
 {
     if (right != null)
     {
         if (left.Rule.Salience == right.Rule.Salience)
         {
             // we compare the facts based on how recent it is
             return compareRecency(left, right);
         }
         else
         {
             if (left.Rule.Salience > right.Rule.Salience)
             {
                 return 1;
             }
             else
             {
                return -1;
             }
         }
     }
     else
     {
         return 1;
     }
 }
Example #5
        public Perceptron(IActivation activationFunction = null)
        {
            var ActivationFunction = activationFunction ?? new TanhSigmoidActivation();

            ComputeFunc = ActivationFunction.ComputeFunc;
            GradientFunc = ActivationFunction.GradientFunc;
            InConnections = new List<IConnection>();
            OutConnections = new List<IConnection>();
        }
Example #6
 public ObservableModelForBubble(IVisualArrayGenerator visualArrayGenerator, IBeliefStateForControls beliefState,
     IActivation activation, IBubbleDataRecorder activationDataRecorder)
 {
     _visualArrayGenerator = visualArrayGenerator;
     _beliefState = beliefState;
     _activation = activation;
     _activationDataRecorder = activationDataRecorder;
     _numberOfFixation = 0;
 }
Example #7
        public void RaiseActivationCreated(ISession session, IActivation activation)
        {
            var handler = ActivationCreatedEvent;

            if (handler != null)
            {
                var @event = new AgendaEventArgs(activation);
                handler(session, @event);
            }
            _parent?.RaiseActivationCreated(session, activation);
        }
Example #8
 public void AddLayer(int neuronCount, IActivation activation)
 {
     if (layers.Count == 0)
     {
         AddInputLayer(neuronCount);
     }
     else
     {
         AddWeightedLayer(neuronCount, activation, DEFAULT_HAS_BIAS);
     }
 }
Example #9
 public void AddLayer(int numberOfNeurons, IActivation activation)
 {
     if (layers.Count == 0)
     {
         layers.Add(new InputLayer(numberOfNeurons));
     }
     else
     {
         layers.Add(new HiddenLayer(numberOfNeurons, layers[layers.Count - 1], activation));
     }
 }
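A hedged usage note for layer-building APIs like the two AddLayer overloads above; the NeuralNetwork container and SigmoidActivation types named below are assumptions for illustration, not taken from these snippets.

    var network = new NeuralNetwork();             // assumed class exposing AddLayer(int, IActivation)
    IActivation sigmoid = new SigmoidActivation(); // assumed IActivation implementation

    network.AddLayer(4, sigmoid);  // layers.Count == 0, so this call creates the input layer
    network.AddLayer(8, sigmoid);  // appended as a weighted/hidden layer using the activation
    network.AddLayer(1, sigmoid);  // output layer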
Example #10
 public Neuron(IActivation activationFunction, INetwork networkType)
 {
     this.activationFunction = activationFunction;
     if (networkType is null)
     {
         this.networkType = new ClassificationNetwork();
     }
     else
     {
         this.networkType = networkType;
     }
 }
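A short construction sketch based on the constructor above; SigmoidActivation is an assumed IActivation implementation, while ClassificationNetwork comes from the snippet itself.

    // Passing null for the network type makes the constructor fall back to ClassificationNetwork.
    var neuron = new Neuron(new SigmoidActivation(), null);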
Example #11
        public Layer(int numberOfNeuronsInPreviousLayer, int numberOfNeurons, ILayerInitializer layerInitializer,
                     IActivation outputActivation)
        {
            Weights = Matrix<double>.Build.Dense(numberOfNeuronsInPreviousLayer, numberOfNeurons, layerInitializer.GetWeight);
            Biases = Vector<double>.Build.Dense(numberOfNeurons, layerInitializer.GetBias);
            OutputActivation = outputActivation;
            previousDeltaWeights = Matrix<double>.Build.Dense(Weights.RowCount, Weights.ColumnCount);
            previousDeltaBiases = Vector<double>.Build.Dense(Biases.Count);
        }
Example #12
            public Layer(uint n_inputs, uint n_outputs, IActivation act)
            {
                activation = act;
                random     = new Random();
                List<Neuron> neuronsList = new List<Neuron>();

                for (int i = 0; i < n_outputs; i++)
                {
                    neuronsList.Add(new Neuron((int)n_inputs, random, 0));
                }
                this.neurons = neuronsList.ToArray();
                this.outputs = new float[n_outputs];
            }
Example #13
        /// <summary>
        /// The default constructor, specifying the activation function of the neurons in the layer, a boolean to add a bias or not and the number of neurons [not including the bias]
        /// </summary>
        /// <param name="_activation">the activaion function for all of the neurons in the layer</param>
        /// <param name="_bias">set to true, will add a bias unit</param>
        /// <param name="_numberOfNeurons">the number of the neurons in the layer</param>
        public Layer(IActivation _activation, bool _bias, int _numberOfNeurons)
        {
            Activation      = _activation;
            Bias            = _bias;
            NumberOfNeurons = _numberOfNeurons;

            /*  int addBias = 0;
             * if (Bias)
             * {
             *    addBias = 1;
             * }
             * NumberOfNeurons += addBias;*/
        }
Example #14
        public Layer(double[,] inputWeights, double[] biases, IActivation outputActivation)
        {
            Weights = Matrix<double>.Build.DenseOfArray(inputWeights);
            Biases = Vector<double>.Build.DenseOfArray(biases);
            OutputActivation = outputActivation;
            WeightGradients = Matrix<double>.Build.Dense(Weights.RowCount, Weights.ColumnCount);
            previousDeltaWeights = Matrix<double>.Build.Dense(Weights.RowCount, Weights.ColumnCount);
            previousDeltaBiases = Vector<double>.Build.Dense(Biases.Count);
        }
Example #15
 public MLP(IActivation activationFunction, params int[] numNeuronInLayers)
 {
     if (numNeuronInLayers == null || numNeuronInLayers.Length < 2)
         throw new ArgumentException("At Least 2 Layers should have neuron count.");
     //input layer
     var il = new InputLayer(numNeuronInLayers[0]);
     //hidden layer with bias
     var layers = numNeuronInLayers.Skip(1).Take(numNeuronInLayers.Length - 2).
         Select(numNeuronInLayer => new PerceptronLayer(numNeuronInLayer, activationFunction)).
         Cast<INodeLayer>().ToList();
     //output layer without bias
     layers.Add(new PerceptronLayer(numNeuronInLayers.Last(), activationFunction, false));
     Create(il, layers.ToArray());
 }
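A one-line construction sketch based on the constructor above; SigmoidActivation is an assumed IActivation implementation.

    // The first value is the input layer size, the middle values become hidden
    // PerceptronLayers with bias, and the last value is the output layer without bias.
    var mlp = new MLP(new SigmoidActivation(), 2, 3, 1);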
Example #16
        public RecursiveNetwork(Type type, IActivation activation, bool useBiases, int inputNeurons, int hiddenNeurons, int outputNeurons)
        {
            IsInitialized = false;
            Activation    = activation;
            NetworkType   = type;

            inputLayer   = new InputLayer(inputNeurons);
            hiddenLayer  = new WeightedLayer(hiddenNeurons, activation, useBiases);
            contextLayer = new ContextLayer(NetworkType == Type.Jordan ? outputNeurons : hiddenNeurons);
            outputLayer  = new WeightedLayer(outputNeurons, activation, useBiases);

            hiddenLayer.ConnectFrom(inputLayer, contextLayer);
            outputLayer.ConnectFrom(hiddenLayer);
        }
Example #17
        public void RaiseRuleFired(ISession session, IActivation activation)
        {
            var handler = RuleFiredEvent;

            if (handler != null)
            {
                var @event = new AgendaEventArgs(activation);
                handler(session, @event);
            }
            if (_parent != null)
            {
                _parent.RaiseRuleFired(session, activation);
            }
        }
Example #18
 /// <summary>
 /// Initialize the basic LSTM cell.
 /// </summary>
 /// <param name="num_units">The number of units in the LSTM cell.</param>
 /// <param name="forget_bias"></param>
 /// <param name="state_is_tuple"></param>
 /// <param name="activation"></param>
 /// <param name="reuse"></param>
 /// <param name="name"></param>
 /// <param name="dtype"></param>
 public BasicLstmCell(int num_units, float forget_bias = 1.0f, bool state_is_tuple = true,
                       IActivation activation           = null, bool? reuse = null, string name = null,
                      TF_DataType dtype = TF_DataType.DtInvalid) : base(_reuse: reuse, name: name, dtype: dtype)
 {
     input_spec      = new InputSpec(ndim: 2);
     _num_units      = num_units;
     _forget_bias    = forget_bias;
     _state_is_tuple = state_is_tuple;
     _activation     = activation;
     if (_activation == null)
     {
         _activation = tf.nn.tanh();
     }
 }
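A minimal construction sketch based only on the constructor signature above; with activation left null, the constructor falls back to tf.nn.tanh().

    // 128-unit LSTM cell using the default forget bias and tuple state.
    var cell = new BasicLstmCell(num_units: 128);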
Example #19
 public Conv2D(int filters,
               int[] kernel_size,
               int[] strides                   = null,
               string padding                  = "valid",
               string data_format              = "channels_last",
               int[] dilation_rate             = null,
               IActivation activation          = null,
               bool use_bias                   = true,
               IInitializer kernel_initializer = null,
               IInitializer bias_initializer   = null,
               bool trainable                  = true,
               string name = null)
 {
 }
Example #20
 public Dense(int units,
              IActivation activation,
              bool use_bias  = true,
              bool trainable = false,
              IInitializer kernel_initializer = null,
              IInitializer bias_initializer   = null) : base(trainable: trainable)
 {
     this.units              = units;
     this.activation         = activation;
     this.use_bias           = use_bias;
     this.kernel_initializer = kernel_initializer;
     this.bias_initializer   = bias_initializer;
     this.supports_masking   = true;
     this.input_spec         = new InputSpec(min_ndim: 2);
 }
Example #21
 /// <summary> Method is called to Remove an activation from the agenda.
 /// </summary>
 /// <param name="actv"></param>
 public virtual void removeActivation(IActivation actv)
 {
     if (profRm)
     {
         removeActivationWProfile(actv);
     }
     else
     {
         if (watch_Renamed_Field)
         {
             engine.writeMessage("<= " + actv.toPPString() + "\r\n", "t");
         }
         actv.Rule.Module.removeActivation(actv);
     }
 }
Example #22
        /* (non-Javadoc)
         * @see woolfel.engine.rete.Activation#compare(woolfel.engine.rete.Activation)
         */

        /// <summary>
        /// If the activation passed in the parameter has the same rule
        /// and facts, the method should return true
        /// </summary>
        /// <param name="act">The act.</param>
        /// <returns></returns>
        public bool compare(IActivation act)
        {
            if (act == this)
            {
                return(true);
            }
            if (act.Rule == theRule && act.Index.Equals(index))
            {
                return(true);
            }
            else
            {
                return(false);
            }
        }
Example #23
 public FasterRCNNResnet101FeatureExtractor(bool is_training,
                                            int first_stage_features_stride,
                                            bool batch_norm_trainable = false,
                                            bool reuse_weights        = false,
                                            float weight_decay        = 0.0f,
                                            IActivation activation_fn = null) : base("resnet_v1_101",
                                                                                     ResNetV1.resnet_v1_101,
                                                                                     is_training,
                                                                                     first_stage_features_stride,
                                                                                     batch_norm_trainable: batch_norm_trainable,
                                                                                     reuse_weights: reuse_weights,
                                                                                     weight_decay: weight_decay,
                                                                                     activation_fn: activation_fn)
 {
 }
Example #24
        public void AddLayer(int neuronCount, IActivation activation, bool hasBias)
        {
            if (layers.Count == 0 && hasBias)
            {
                throw new InvalidOperationException("Input layer cannot have bias.");
            }

            if (layers.Count == 0)
            {
                AddInputLayer(neuronCount);
            }
            else
            {
                AddWeightedLayer(neuronCount, activation, hasBias);
            }
        }
Example #25
    private void OnCollisionEnter(Collision collision)
    {
        IBreakable breakable = collision.gameObject.GetComponent<IBreakable>();

        if (breakable != null)
        {
            breakable.BreakObject();
        }

        IActivation activationObject = collision.gameObject.GetComponent<IActivation>();

        if (activationObject != null)
        {
            activationObject.DoActivate();
        }
    }
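A minimal sketch of a component on the receiving end of that call; it assumes IActivation in this project exposes only the DoActivate() method used above, and the LampActivation class is hypothetical.

    using UnityEngine;

    // Hypothetical activatable object: toggles a light when the collision handler
    // above calls DoActivate on it.
    public class LampActivation : MonoBehaviour, IActivation
    {
        [SerializeField] private Light lamp;

        public void DoActivate()
        {
            if (lamp != null)
            {
                lamp.enabled = !lamp.enabled;
            }
        }
    }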
Example #26
            public static Tensor conv2d(Tensor inputs,
                                        int filters,
                                        int[] kernel_size,
                                        int[] strides                   = null,
                                        string padding                  = "valid",
                                        string data_format              = "channels_last",
                                        int[] dilation_rate             = null,
                                        bool use_bias                   = true,
                                        IActivation activation          = null,
                                        IInitializer kernel_initializer = null,
                                        IInitializer bias_initializer   = null,
                                        bool trainable                  = true,
                                        string name = null)
            {
                if (strides == null)
                {
                    strides = new int[] { 1, 1 };
                }
                if (dilation_rate == null)
                {
                    dilation_rate = new int[] { 1, 1 };
                }
                if (bias_initializer == null)
                {
                    bias_initializer = tf.zeros_initializer;
                }

                var layer = new Conv2D(filters,
                                       kernel_size: kernel_size,
                                       strides: strides,
                                       padding: padding,
                                       data_format: data_format,
                                       dilation_rate: dilation_rate,
                                       activation: activation,
                                       use_bias: use_bias,
                                       kernel_initializer: kernel_initializer,
                                       bias_initializer: bias_initializer,
                                       trainable: trainable,
                                       name: name);

                return(layer.apply(inputs));
            }
Example #27
        public FeedForwardLayer(ExecutionContext executionContext, FeedForwardLayerOptions options, ushort[] previousLayerDimensionality)
        {
            _options                = options;
            Dimensionality          = options.Dimensionality;
            _weightLength           = MatrixHelpers.GetWeightCardinality(previousLayerDimensionality, options.Dimensionality);
            NodeCount               = MatrixHelpers.GetCardinality(options.Dimensionality);
            _previousLayerNodeCount = MatrixHelpers.GetCardinality(previousLayerDimensionality);
            _activation             = (options.ActivationOptions ?? new ReluActivationOptions())
                                      .Create();
            _update = (options.UpdateOptions ?? new FeedForwardUpdateOptions())
                      .Create(executionContext);

            CompileKernels(executionContext);
            AllocateBuffers(executionContext, options);
            SetForwardPassArgs();
            SetBackwardPassArgs();
        }
Example #28
 public RandNeuralGenomeGeneratorBase(
     Random random_,
     IActivation activator,
     double weightRange,
     int inputCount,
     int outputCount,
     int[] hiddenLayers,
     bool createBias)
 {
     RandomInst   = random_;
     Activator    = activator;
     WeightRange  = weightRange;
     InputCount   = inputCount;
     OutputCount  = outputCount;
     HiddenLayers = hiddenLayers;
     CreateBias   = createBias;
 }
Example #29
        /// <summary>
        /// Activation layer. Adds activation functions to a neural net.
        /// </summary>
        /// <param name="activation"></param>
        public ActivationLayer(Activation activation)
        {
            ActivationFunc = activation;

            switch (activation)
            {
            case Activation.Undefined:
                throw new ArgumentException("ActivationLayer must have a defined activation function. Provided with: " + activation);

            case Activation.Relu:
                m_activation = new ReluActivation();
                break;

            default:
                throw new ArgumentException("Unsupported activation type: " + activation);
            }
        }
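A usage sketch for the layer above; Activation.Relu is the only case the switch currently wires up, and anything else throws.

    var reluLayer = new ActivationLayer(Activation.Relu);   // selects ReluActivation internally
    // new ActivationLayer(Activation.Undefined);           // would throw ArgumentException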
Example #30
        /// <summary> compareRecency looks at which activation is more recent.
        /// It first compares the timestamps of the facts; if the timestamps are
        /// equal, it favors the activation with more facts.
        /// </summary>
        /// <param name="left"></param>
        /// <param name="right"></param>
        /// <returns></returns>
        protected internal virtual int compareRecency(IActivation left, IActivation right)
        {
            IFact[] lfacts  = left.Facts;
            IFact[] rfacts  = right.Facts;
            int     len     = lfacts.Length;
            int     compare = 0;

            if (rfacts.Length < len)
            {
                len = rfacts.Length;
            }
            // first we compare the time stamp
            for (int idx = 0; idx < len; idx++)
            {
                if (lfacts[idx].timeStamp() > rfacts[idx].timeStamp())
                {
                    return(1);
                }
                else if (lfacts[idx].timeStamp() < rfacts[idx].timeStamp())
                {
                    return(-1);
                }
            }
            // the activation with more facts has a higher priority
            if (lfacts.Length > rfacts.Length)
            {
                return(1);
            }
            else if (lfacts.Length < rfacts.Length)
            {
                return(-1);
            }
            // lastly we compare the fact id
            for (int idx = 0; idx < len; idx++)
            {
                if (lfacts[idx].FactId > rfacts[idx].FactId)
                {
                    return(1);
                }
                else if (lfacts[idx].FactId < rfacts[idx].FactId)
                {
                    return(-1);
                }
            }
            return(0);
        }
Example #31
        public MLP(IActivation activationFunction, params int[] numNeuronInLayers)
        {
            if (numNeuronInLayers == null || numNeuronInLayers.Length < 2)
            {
                throw new ArgumentException("At Least 2 Layers should have neuron count.");
            }
            //input layer
            var il = new InputLayer(numNeuronInLayers[0]);
            //hidden layer with bias
            var layers = numNeuronInLayers.Skip(1).Take(numNeuronInLayers.Length - 2).
                         Select(numNeuronInLayer => new PerceptronLayer(numNeuronInLayer, activationFunction)).
                         Cast<INodeLayer>().ToList();

            //output layer without bias
            layers.Add(new PerceptronLayer(numNeuronInLayers.Last(), activationFunction, false));
            Create(il, layers.ToArray());
        }
Example #32
 /// <summary> Add an activation to the agenda.
 /// </summary>
 /// <param name="actv"></param>
 public virtual void addActivation(IActivation actv)
 {
     // the implementation should Get the current focus from Rete
     // and then Add the activation to the Module.
     if (profAdd)
     {
         addActivationWProfile(actv);
     }
     else
     {
         if (watch_Renamed_Field)
         {
             engine.writeMessage("=> " + actv.toPPString() + "\r\n", "t");
         }
         actv.Rule.Module.addActivation(actv);
     }
 }
Example #33
 public FasterRCNNResnetV1FeatureExtractor(string architecture,
                                           Action resnet_model,
                                           bool is_training,
                                           int first_stage_features_stride,
                                           bool batch_norm_trainable = false,
                                           bool reuse_weights        = false,
                                           float weight_decay        = 0.0f,
                                           IActivation activation_fn = null) : base(is_training,
                                                                                    first_stage_features_stride,
                                                                                    batch_norm_trainable: batch_norm_trainable,
                                                                                    reuse_weights: reuse_weights,
                                                                                    weight_decay: weight_decay)
 {
     if (activation_fn == null)
     {
         activation_fn = tf.nn.relu();
     }
 }
Example #34
 /// <summary> removeActivation will check to see if the activation is
 /// the first or last before removing it.
 /// </summary>
 public override IActivation removeActivation(IActivation act)
 {
     if (act is LinkedActivation)
     {
         LinkedActivation lact = (LinkedActivation)act;
         if (first == lact)
         {
             first = lact.Next;
         }
         if (last == lact)
         {
             last = lact.Previous;
         }
         count--;
         lact.remove();
     }
     return(act);
 }
Example #35
        Tensor conv2d(Tensor inputs, int filters, string padding, bool batchNorm, string name)
        {
            var kernel_initializer = tf.variance_scaling_initializer();
            var bias_initializer   = tf.constant_initializer(value: 0);
            //Tensor top = null;
            IActivation activation = batchNorm ? null : tf.nn.relu();
            var         top        = tf.layers.conv2d(inputs, filters, kernel_size: new int[] { 3, 3 }, padding: padding,
                                                      activation: activation, kernel_initializer: kernel_initializer,
                                                      bias_initializer: bias_initializer, name: name);

            if (batchNorm)
            {
                var training = tf.placeholder(tf.@bool, name: "training");
                top = tf.layers.batch_normalization(top, axis: 3, trainable: this.trainable, training: training, name: name);
                top = tf.nn.relu(top, name: name + "_relu");
            }
            return(top);
        }
Example #36
        // Example of how to create a neuron layer from scratch, use tf.layers.dense instead
        public static Tensor NeuronLayer(Tensor X, int nNeurons, string name, IActivation activation = null)
        {
            using (tf.name_scope(name))
            {
                int         nInputs = X.shape[1];
                NDArray     stddev  = 2 / np.sqrt(nInputs);
                Tensor      init    = tf.truncated_normal(new[] { nInputs, nNeurons }, stddev: stddev);
                RefVariable W       = tf.Variable(init, name: "kernel");
                RefVariable b       = tf.Variable(tf.zeros(new[] { nNeurons }), name: "bias");
                Tensor      Z       = tf.matmul(X, W) + b;

                if (activation != null)
                {
                    return(activation.Activate(Z));
                }

                return(Z);
            }
        }
Example #37
 /// <summary> The method first compares the salience. If the salience is equal,
 /// we then compare the aggregate time.
 /// </summary>
 /// <param name="left"></param>
 /// <param name="right"></param>
 /// <returns></returns>
 public virtual int compare(IActivation left, IActivation right)
 {
     if (right != null)
     {
         if (left.Rule.Salience == right.Rule.Salience)
         {
              // Since Sumatra does not propagate based on natural order, we
             // don't use the Activation timestamp. Instead, we use the
             // aggregate time.
             if (left.AggregateTime == right.AggregateTime)
             {
                 return(0);
             }
             else
             {
                 if (left.AggregateTime > right.AggregateTime)
                 {
                     return(1);
                 }
                 else
                 {
                     return(-1);
                 }
             }
         }
         else
         {
             if (left.Rule.Salience > right.Rule.Salience)
             {
                 return(1);
             }
             else
             {
                 return(-1);
             }
         }
     }
     else
     {
         return(1);
     }
 }
Example #38
 /// <summary> if the profiling is turned on for Remove, the method is
 /// called to Remove activations.
 /// </summary>
 /// <param name="actv"></param>
 public virtual void removeActivationWProfile(IActivation actv)
 {
     ProfileStats.startRemoveActivation();
     actv.Rule.Module.removeActivation(actv);
     ProfileStats.endRemoveActivation();
 }
Example #39
 /// <summary> Add an activation to the agenda.
 /// </summary>
 /// <param name="actv"></param>
 public virtual void addActivation(IActivation actv)
 {
     // the implementation should Get the current focus from Rete
     // and then Add the activation to the Module.
     if (profAdd)
     {
         addActivationWProfile(actv);
     }
     else
     {
         if (watch_Renamed_Field)
         {
             engine.writeMessage("=> " + actv.toPPString() + "\r\n", "t");
         }
         actv.Rule.Module.addActivation(actv);
     }
 }
Example #40
 /// <summary> if profiling is turned on, the method is called to Add
 /// new activations to the agenda
 /// </summary>
 /// <param name="actv"></param>
 public virtual void addActivationWProfile(IActivation actv)
 {
     ProfileStats.startAddActivation();
     actv.Rule.Module.addActivation(actv);
     ProfileStats.endAddActivation();
 }
Example #41
 /// <summary> Method is called to Remove an activation from the agenda.
 /// </summary>
 /// <param name="actv"></param>
 public virtual void removeActivation(IActivation actv)
 {
     if (profRm)
     {
         removeActivationWProfile(actv);
     }
     else
     {
         if (watch_Renamed_Field)
         {
             engine.writeMessage("<= " + actv.toPPString() + "\r\n", "t");
         }
         actv.Rule.Module.removeActivation(actv);
     }
 }
Example #42
 /// <summary> The method should Get the agenda and use it to Add the new
 /// activation to the agenda
 /// </summary>
 /// <param name="actv"></param>
 public virtual void addActivation(IActivation actv)
 {
     activations.addActivation(actv);
 }
Example #43
 /// <summary> Remove an activation from the list
 /// </summary>
 /// <param name="actv"></param>
 /// <returns></returns>
 public virtual IActivation removeActivation(IActivation actv)
 {
     return (IActivation) activations.removeActivation(actv);
 }
Example #44
 public abstract void addActivation(IActivation act);
Example #45
 /// <summary>
 /// method is used to fire an activation immediately
 /// </summary>
 /// <param name="act">The act.</param>
 protected internal virtual void fireActivation(IActivation act)
 {
     if (act != null)
     {
         try
         {
             pushScope(act.Rule);
             act.executeActivation(this);
             //act.clear(); TODO HACK
             popScope();
             firingcount++;
             addRuleFired(act.Rule);
             act.clear();
         }
         catch (ExecuteException e)
         {
             TraceLogger.Instance.Debug(e);
         }
     }
 }
Example #46
 public abstract IActivation removeActivation(IActivation act);
Example #47
 /// <summary>
  /// Convenient method for comparing two Activations in a module's
 /// activation list. If the rule is the same and the index is the
 /// same, the method returns true. This compare method isn't meant
 /// to be used for strategies. It is up to strategies to compare
 /// two activations against each other using various criteria.
 /// </summary>
 /// <param name="act">The act.</param>
 /// <returns></returns>
 public bool compare(IActivation act)
 {
     if (act == this)
     {
         return false;
     }
     if (act.Rule == theRule && act.Index.Equals(index))
     {
         return true;
     }
     else
     {
         return false;
     }
 }
Example #48
 /// <summary> compareRecency looks at which activation is more recent.
 /// It first compares the timestamps of the facts; if the timestamps are
 /// equal, it favors the activation with more facts.
 /// </summary>
 /// <param name="left"></param>
 /// <param name="right"></param>
 /// <returns></returns>
 protected internal virtual int compareRecency(IActivation left, IActivation right)
 {
     IFact[] lfacts = left.Facts;
     IFact[] rfacts = right.Facts;
     int len = lfacts.Length;
     int compare = 0;
     if (rfacts.Length < len)
     {
         len = rfacts.Length;
     }
     // first we compare the time stamp
     for (int idx = 0; idx < len; idx++)
     {
         if (lfacts[idx].timeStamp() > rfacts[idx].timeStamp())
         {
             return 1;
         }
         else if (lfacts[idx].timeStamp() < rfacts[idx].timeStamp())
         {
              return -1;
         }
     }
     // the activation with more facts has a higher priority
     if (lfacts.Length > rfacts.Length)
     {
         return 1;
     }
     else if (lfacts.Length < rfacts.Length)
     {
          return -1;
     }
      // lastly we compare the fact id
     for (int idx = 0; idx < len; idx++)
     {
         if (lfacts[idx].FactId > rfacts[idx].FactId)
         {
             return 1;
         }
         else if (lfacts[idx].FactId < rfacts[idx].FactId)
         {
              return -1;
         }
     }
     return 0;
 }
Example #49
 /// <summary> removeActivation will check to see if the activation is
 /// the first or last before removing it.
 /// </summary>
 public override IActivation removeActivation(IActivation act)
 {
     if (act is LinkedActivation)
     {
         LinkedActivation lact = (LinkedActivation) act;
         if (first == lact)
         {
             first = lact.Next;
         }
         if (last == lact)
         {
             last = lact.Previous;
         }
         count--;
         lact.remove();
     }
     return act;
 }
Example #50
 public virtual void addActivation(IActivationList thelist, IActivation newActivation)
 {
     thelist.addActivation(newActivation);
 }