Example no. 1
        public void Build(int inputsCount, 
            int[] hiddenLayersSpec, 
            int outputsCount, 
            ActivationFunction activationFunction,
            double maximumAbsoluteWeight)
        {
            layers = new List<ILayer>();
            inputLayer = new InputLayer();
            for (int i = 0; i < inputsCount; i++)
                inputLayer.AddNeuron(new Neuron());

            isBipolar = activationFunction == Neuron.BipolarActivationFunction;
            layers.Add((Layer)inputLayer);

            if (hiddenLayersSpec != null)
                for (int i = 0; i < hiddenLayersSpec.Length; i++)
                {
                    ILayer layer = new Layer();
                    for (int j = 0; j < hiddenLayersSpec[i]; j++)
                    {
                        layer.AddNeuron(new Neuron(activationFunction));
                    }
                    layers.Add(layer);
                }

            outputLayer = new Layer();
            for (int i = 0; i < outputsCount; i++)
            {
                outputLayer.AddNeuron(new Neuron(activationFunction));
            }
            layers.Add(outputLayer);

            ConnectLayers(maximumAbsoluteWeight);
        }
Example no. 2
 public override MidiInputChannel GetInputChannel(IInputLayer parent)
 {
     if (this.c == null)
     {
         this.c = new ButtonInputChannel(this, parent);
     }
     return this.c;
 }
Example no. 3
 public override MidiInputChannel GetInputChannel(IInputLayer parent)
 {
     if (this.c == null)
     {
         this.c = new MidiRangeInputChannel(parent, this);
     }
     return this.c;
 }
Example no. 4
        public void InputLayer_WithSortOrder_InitializesProperty()
        {
            // Arrange
            _layer = new InputLayer(5);

            // Assert
            Assert.IsTrue(_layer.SortOrder == 5);
        }
Example no. 5
        public void Initialize()
        {
            _network     = new DFFNeuralNetwork(_inputLayerNeuronCount, _hiddenLayersCount, _hiddenLayerNeuronCount, _outputLayerNeuronCount);
            _inputLayer  = _network.Layers.OfType<IInputLayer>().First();
            _outputLayer = _network.Layers.OfType<IOutputLayer>().First();
            _hiddenLayer = _network.Layers.OfType<IHiddenLayer>().First();

            _trainingIterations = new List<INetworkTrainingIteration>();
        }
Example no. 6
        public static IDisposable BindTo(this IObservable<bool> This, IInputLayer inputLayer, string reason)
        {
            var disableDisposable = new SerialDisposable();

            return new CompositeDisposable(
                       disableDisposable,
                       This.DistinctUntilChanged()
                       .Subscribe(x => disableDisposable.Disposable = x
                        ? null
                        : inputLayer.Disable(reason)));
        }
Example no. 7
        static void Evaluate(Func<Tuple<INetwork, IFactory>> GetNetwork, bool verbose)
        {
            var StartTimingLayer = new TimingLayer() {StartCounters = new string[] { "Prediction-Time" } };
            var StopTimingLayer = new TimingLayer() {StopCounters = new string[] { "Prediction-Time" } };
            IInputLayer ReaderLayer = null;
            var NetworkAndFactory = GetNetwork();
            var Network = NetworkAndFactory.Item1;
            var Factory = NetworkAndFactory.Item2;
            {
                var p = Network;
                // walk down the source chain until the layer sitting directly above the EncryptLayer is found
                while (!(p.GetSource() is EncryptLayer)) p = p.GetSource();

                // splice the start-timing layer in right after encryption
                StartTimingLayer.Source = p.GetSource();
                var b = p as BaseLayer;
                b.Source = StartTimingLayer;

                // find the reader
                while (p.GetSource() != null) p = p.GetSource();
                ReaderLayer = p as IInputLayer;

                // stop the timing counters after computing the entire network
                StopTimingLayer.Source = Network;
                Network = StopTimingLayer;
                p = Network;
                while (p != null)
                {
                    p.Factory = Factory;
                    if (p is BaseLayer bas) bas.Verbose = verbose;
                    p = p.GetSource();
                }

                Network.PrepareNetwork();
            }
            int errs = 0;
            for (int i = 0; i < 10000; i++)
            {
                using (var m = Network.GetNext())
                {
                    var l = ReaderLayer.Labels[0];
                    int pred = 0;
                    Utils.ProcessInEnv(env =>
                    {
                        var dec = m.Decrypt(env);
                        for (int j = 0; j < 10; j++)
                            if (dec[j, 0] > dec[pred, 0]) pred = j;
                        if (pred != l) errs++;
                    }, Factory);

                    Console.WriteLine("errs {0}/{1} accuracy {2:0.000}% {3} prediction {4} label {5}", errs, i + 1, 100 - (100.0 * errs / (i + 1)), TimingLayer.GetStats(), pred, l);
                }
            }
            Network.DisposeNetwork();
        }
Example no. 8
        public NetworkTopology()
        {
            hiddenLayers = null;
            inputLayer   = null;
            outputLayer  = null;

            preProcessor  = null;
            postProcessor = null;

            TrainingPreProcessor = null;
            TrainingAlgorithm    = null;
        }
Example no. 9
        public NetworkTopology()
        {
            hiddenLayers = null;
            inputLayer = null;
            outputLayer = null;

            preProcessor = null;
            postProcessor = null;

            TrainingPreProcessor = null;
            TrainingAlgorithm = null;
        }
Example no. 10
        public void InputLayer_WithSortOrderAndNeurons_InitializesProperty()
        {
            // Arrange
            var inputNeuron = new InputNeuron();

            _layer = new InputLayer(5, new List<IInputNeuron>()
            {
                inputNeuron
            });

            // Assert
            Assert.IsTrue(_layer.Neurons.Count() == 1);
            Assert.IsTrue(_layer.Neurons.First() == inputNeuron);
        }
Example no. 11
 public MidiInputChannel(IInputLayer parent, DeviceRule rule, bool registerValueChange = true, bool registerFB = true) : base(rule.GUID, parent)
 {
     base.AutofireChangedEvent = true;
     rule.NameChanged         += new EventHandler(this.HandleNameChanged);
     base.Name = rule.Name;
     if (registerValueChange)
     {
         rule.ValueChanged += new EventHandler<ValueChangedEventArgs>(this.HandleValueChanged);
     }
     if (registerFB)
     {
         this.FeedbackCB = new InputLayerChangedCallback(this.HandleFeedback);
     }
     this.rule     = rule;
     this.Changed += HandleChanged;
 }
Example no. 12
        public Network(
            IConfiguration configuration,
            IInputLayer inputLayer,
            IOutputReader<T> outputLayer
            )
        {
            if (configuration.HiddenLayerCount <= 0)
            {
                throw new ArgumentException("Layer count should be positive");
            }
            if (configuration.NeuronPerLayer <= 0)
            {
                throw new ArgumentException("Neuron per layer should be positive");
            }

            _configuration = configuration;


            randomSource = new Random();

            _inputLayer  = inputLayer;
            _outputLayer = outputLayer;

            _layers = new Layer[configuration.HiddenLayerCount + 2];

            AttachInput(inputLayer);
            CreateHiddenLayer();
            AttachOutput(outputLayer, configuration.HiddenLayerCount + 1);

            BindAllNeuron();

            UpdateWeight();

            if (configuration.SaveToDisk)
            {
                _dal = new DataAccessLibrary<T>();
                _dal.CreateDatabase();
                _dal.SaveNetwork(this);
            }
        }
Example no. 13
        public InputLayer(string name, IInputLayer parent = null)
        {
            Name = name;

            var isEnabledObservable = _refCount.Select(x => x == 0);

            // Also take parent's state into account, if any
            if (parent != null)
            {
                ((InputLayer)parent)._children.Add(this);

                isEnabledObservable = isEnabledObservable
                                      .CombineLatest(parent.IsEnabled, (x, y) => x && y);
            }

            _isEnabled = isEnabledObservable.ToReactiveProperty();

            if (Log.IsDebugEnabled)
            {
                _isEnabled
                .DistinctUntilChanged()
                .Subscribe(x => Log.Debug($"{Name} IsEnabled: {x}"));
            }
        }
Example no. 14
 public abstract MidiInputChannel GetInputChannel(IInputLayer parent);
Example no. 15
 public static IObservable<T> DisableLayerUntilCompleted<T>(this IObservable<T> This, IInputLayer layer, string reason) =>
 Observable.Defer(() =>
 {
     var disposable = layer.Disable(reason);
     return This.Finally(() => disposable.Dispose());
 });
Example no. 16
 public KohonenNetwork(IInputLayer inputLayer, IInnerLayer outputLayer)
 {
     _inputLayer = inputLayer;
     KohonenLayer = outputLayer;
 }
Example no. 17
 public void Initialize()
 {
     _layer = new InputLayer(1);
 }
Example no. 18
 public static IDisposable BindStateTo(this IPresenter This, IInputLayer inputLayer) =>
 This.State
 .Select(x => x == PresenterState.Ready)
 .BindTo(inputLayer, "Navigating");
Example no. 19
 protected TwoLayersNetwork(IInputLayer inputLayer, ILayer<INode> outputLayer)
     : base(inputLayer, outputLayer)
 {
     _outputLayer = outputLayer;
 }
Example no. 20
        /// <summary>
        /// Creates an object for a specified datatype
        /// </summary>
        /// <typeparam name="TObject">Interface that will be implemented and returned</typeparam>
        /// <param name="objectName">The name of the data type that will be instantiated
        /// and returned.</param>
        /// <param name="buildParam">The parameter that will be used to create the object</param>
        /// <returns>Returns a finished object instance</returns>
        public TObject CreateUserObject<TObject>(
            string objectName, Dictionary<string, string> buildParam)
        {
            switch (objectName)
            {
            case "BasicNode":
                string[] rawModifiers = ParseList(buildParam["combinationWeights"]);
                float[]  modifiers    = new float[rawModifiers.Length];

                for (int i = 0; i < rawModifiers.Length; i++)
                {
                    float.TryParse(rawModifiers[i], out modifiers[i]);
                }
                BasicNode node = new BasicNode();
                node.MetaData = buildParam;
                node.Weights  = modifiers;
                if (buildParam.ContainsKey("activationFunction"))
                {
                    node.ActivationFunc = getActivationFunction(buildParam["activationFunction"]);
                }
                INode tempNode = node;
                return (TObject)tempNode;

            case "CustomizableNode":
                rawModifiers = ParseList(buildParam["combinationWeights"]);
                modifiers    = new float[rawModifiers.Length];

                for (int i = 0; i < rawModifiers.Length; i++)
                {
                    float.TryParse(rawModifiers[i], out modifiers[i]);
                }
                CustomizableNode custNode = new CustomizableNode();
                custNode.MetaData = buildParam;
                custNode.Weights  = modifiers;
                if (buildParam.ContainsKey("activationFunction"))
                {
                    custNode.ActivationFunc =
                        getActivationFunction(buildParam["activationFunction"]);
                }
                if (buildParam.ContainsKey("combinationFunction"))
                {
                    custNode.ComboFunction =
                        getCombinationFunction(buildParam["combinationFunction"]);
                }
                tempNode = custNode;
                return (TObject)tempNode;

            case "BasicInputLayer":
                BasicInputLayer input = new BasicInputLayer();
                input.MetaData = buildParam;
                string[] rawValues = ParseList(buildParam["inputIndexes"]);
                int[]    values    = new int[rawValues.Length];
                for (int i = 0; i < rawValues.Length; i++)
                {
                    int.TryParse(rawValues[i], out values[i]);
                }
                input.ValueIndexes = values;
                IInputLayer tempIn = input;
                return (TObject)tempIn;

            case "BasicOutputLayer":
                IOutputLayer tempOut = new BasicOutputLayer();
                tempOut.MetaData = buildParam;
                return (TObject)tempOut;

            case "BasicLayer":
                int count;
                int.TryParse(buildParam["nodeCount"], out count);
                IHiddenLayer tempHidden = new BasicLayer(new INode[count]);
                tempHidden.MetaData = buildParam;
                return (TObject)tempHidden;

            case "ThreadedHiddenLayer":
                int.TryParse(buildParam["nodeCount"], out count);
                tempHidden          = new ThreadedHiddenLayer(new INode[count]);
                tempHidden.MetaData = buildParam;
                return (TObject)tempHidden;

            case "HillClimbAlgo":
                ITrainingAlgorithm algo = new HillClimbAlgo();
                return (TObject)algo;

            default:
                return default(TObject);
            }
        }
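A minimal usage sketch for the factory method above. The ObjectFactory host type, the weight-list format expected by ParseList, and the "sigmoid" activation name are assumptions not confirmed by the snippet; only the objectName strings and buildParam keys come from the switch cases shown.

        // Hypothetical caller; assumes `using System.Collections.Generic;` and that
        // CreateUserObject lives on a factory class here called ObjectFactory.
        var factory = new ObjectFactory();

        // "BasicNode" reads a "combinationWeights" list and an optional "activationFunction".
        INode node = factory.CreateUserObject<INode>("BasicNode",
            new Dictionary<string, string>
            {
                ["combinationWeights"] = "0.5,1.0,-0.25",   // list format assumed; parsed by ParseList
                ["activationFunction"] = "sigmoid"          // name assumed; resolved by getActivationFunction
            });

        // "BasicLayer" only needs a "nodeCount" entry.
        IHiddenLayer hidden = factory.CreateUserObject<IHiddenLayer>("BasicLayer",
            new Dictionary<string, string> { ["nodeCount"] = "4" });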
Example no. 21
 public MidiRangeInputChannel(IInputLayer parent, DeviceRule r) : base(parent, r, true, true)
 {
 }
Example no. 22
 public ButtonInputChannel(DeviceRule d, IInputLayer parent) : base(parent, d, true, true)
 {
 }