Example #1
        protected void CreateNeurons(int inputSize, int[] hiddenSizes, int outputSize)
        {
            for (var i = 0; i < inputSize; i++)
            {
                InputLayer.Add(new Neuron());
            }

            var firstHiddenLayer = new List <Neuron>();

            for (var i = 0; i < hiddenSizes[0]; i++)
            {
                firstHiddenLayer.Add(new Neuron(InputLayer, HiddenActivationType));
            }

            HiddenLayers.Add(firstHiddenLayer);

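            // Each subsequent hidden layer takes its inputs from the previously created hidden layer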
            for (var i = 1; i < hiddenSizes.Length; i++)
            {
                var hiddenLayer = new List <Neuron>();
                for (var j = 0; j < hiddenSizes[i]; j++)
                {
                    hiddenLayer.Add(new Neuron(HiddenLayers[i - 1], HiddenActivationType));
                }
                HiddenLayers.Add(hiddenLayer);
            }

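            // Output neurons take their inputs from the last hidden layer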
            for (var i = 0; i < outputSize; i++)
            {
                OutputLayer.Add(new Neuron(HiddenLayers.Last(), OutputActivationType));
            }
        }
Example #2
        protected Network(Network initialNetwork)
        {
            var inputLayerSettings = LayerToLayerSettings(initialNetwork.InputLayer);
            var inputLayer         = new InputLayer(inputLayerSettings, this);

            Layers = new List <Layer>(initialNetwork.Layers.Count)
            {
                inputLayer
            };
            foreach (var initialLayer in initialNetwork.HiddenLayers)
            {
                var prev          = Layers.Last();
                var layerSettings = LayerToLayerSettings(initialLayer);
                Layers.Add(new HiddenLayer(layerSettings, this, prev));
            }
            var outputLayerSettings = LayerToLayerSettings(initialNetwork.OutputLayer);

            Layers.Add(new OutputLayer(outputLayerSettings, this, Layers.Last()));

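            // Copy each neuron's bias and weights from the source network, layer by layer (in parallel)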
            foreach (var(next, prev) in Layers.Zip(initialNetwork.Layers, (next, prev) => (next, prev)))
            {
                Parallel.ForEach(next.Neurons.Zip(prev.Neurons, (nextNeuron, prevNeuron) => new { nextNeuron, prevNeuron }), x =>
                {
                    x.nextNeuron.Bias    = x.prevNeuron.Bias;
                    x.nextNeuron.Weights = new List <double>(x.prevNeuron.Weights);
                });
                //foreach (var (nextNeuron, prevNeuron) in next.Neurons.Zip(prev.Neurons, (nextNeuron, prevNeuron) => (nextNeuron, prevNeuron)))
                //{
                //    nextNeuron.Bias = prevNeuron.Bias;
                //    nextNeuron.Weights = new List<double>(prevNeuron.Weights);
                //}
            }
        }
Example #3
	/// <summary>
	/// Selects the first button
	/// </summary>
	/// <param name="key">the key</param>
	/// <param name="strength">the strength</param>
	/// <param name="layer">the layer</param>
	public void OnKeyDown(Key key, float strength, InputLayer layer)
	{
		if (UI.Instance.EventSystem != null && UI.Instance.EventSystem.currentSelectedGameObject == null && (key == Key.Forward || key == Key.Backward || key == Key.Left || key == Key.Right) && layer == InputLayer.UI)
		{
			openSubmenus.Peek().SelectFirstButton();
		}
	}
Example #4
        private void PushLayer(CNNLayer layer)
        {
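            // If the network already has layers, chain the new layer onto the current tail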
            if (Layers.Count != 0)
            {
                layer.PrevLayer = Layers.Last();
                layer.InSize    = Layers.Last().OutSize;
                layer.InDepth   = Layers.Last().OutDepth;
                Layers.Last().NextLayer = layer;

                if (layer is InputLayer)
                {
                    throw new ArgumentException("You can't use more than 1 input layer");
                }

                if (layer is OutputLayer)
                {
                    if (OutputLayer != null)
                    {
                        throw new ArgumentException("You can't use more than 1 output layer");
                    }
                    OutputLayer = (OutputLayer)layer;
                }
            }
            else
            {
                if (!(layer is InputLayer))
                {
                    throw new ArgumentException("First network layer should be " + nameof(InputLayer));
                }
                InputLayer = (InputLayer)layer;
            }

            Layers.Add(layer);
            layer.Setup();
        }
Example #5
        public OutputNeuronFactory(InputLayer inputLayer, ConnectionFactory connectionFactory)
        {
            this.inputLayer = inputLayer;
            this.connectionFactory = connectionFactory;

            weightUpperBound = L / (L - 1f + inputLayer.Size);
        }
Example #6
    // Use this for initialization
    void Start()
    {
        Input  = new InputLayer();
        Hidden = new Layer();
        Output = new Layer();


        for (int i = 0; i < 75; i++)
        {
            Input.Neurons.Add(new InputNeuron());
        }
        for (int i = 0; i < 30; i++)
        {
            Neuron n = new Neuron();
            for (int x = 0; x < Input.Neurons.Count; x++)
            {
                n.Weights.Add(UnityEngine.Random.value);
            }
            n.Threshold = ThreshAdjust;
            Hidden.Neurons.Add(n);
        }
        for (int i = 0; i < 15; i++)
        {
            Neuron n = new Neuron();
            for (int x = 0; x < Hidden.Neurons.Count; x++)
            {
                n.Weights.Add(UnityEngine.Random.value);
            }
            n.Threshold = ThreshAdjust;
            Output.Neurons.Add(n);
        }
        StartCoroutine("WaitForEverAThing");
    }
Example #7
            public static Tensor[] Input(int[] batch_shape = null,
                                         TF_DataType dtype = TF_DataType.DtInvalid,
                                         string name       = null,
                                         bool sparse       = false,
                                         Tensor tensor     = null)
            {
                var batch_size = batch_shape[0];
                var shape      = batch_shape.Skip(1).ToArray();

                InputLayer input_layer = null;

                if (batch_shape != null)
                {
                    input_layer = new InputLayer(
                        batch_input_shape: batch_shape,
                        name: name,
                        dtype: dtype,
                        sparse: sparse,
                        input_tensor: tensor);
                }
                else
                {
                    input_layer = new InputLayer(
                        input_shape: shape,
                        batch_size: batch_size,
                        name: name,
                        dtype: dtype,
                        sparse: sparse,
                        input_tensor: tensor);
                }

                var outputs = input_layer.inbound_nodes[0].output_tensors;

                return(outputs);
            }
Example #8
        //public Network(double[][] inputValues, double[] desiredOutput, int numberOfLayers, int[] numberOfNeuronsInLayers)
        public Network(int inputWidth, int numberOfLayers, int[] numberOfNeuronsInLayers)
        {
            //_input = inputValues;
            //_desiredOutput = desiredOutput;

            _inputWidth = inputWidth;
            _numberOfLayers = numberOfLayers;

            Layers = new ILayer[_numberOfLayers + 2]; // the input and output layers are not counted in numberOfLayers

            // the input layer first
            Layers[0] = new InputLayer(_inputWidth, _inputWidth);
            //Layers[0] = new InputLayer();

            for (int i = 1; i < numberOfLayers+1; i++)
            {
                Layers[i] = new Layer(numberOfNeuronsInLayers[i - 1], Layers[i - 1].Neurons.Length);
            }

            // the output layer, which has only one neuron
            Layers[numberOfLayers+1] = new Layer(1, Layers[numberOfLayers].Neurons.Length);

            _totalNumberOfWeights = 0;
            foreach (var layer in Layers)
            {
                _totalNumberOfWeights += layer.NumberOfWeights();
            }
        }
Example #9
        private NeuralNetwork.NeuralNetwork CreateNn()
        {
            NeuralNetwork.NeuralNetwork nn;

            nn = new NeuralNetwork.NeuralNetwork();

            InputLayer inputLayer = nn.CreateInputLayer();

            //inHeading = inputLayer.CreateNeuron("heading");
            //inVelocityAngle = inputLayer.CreateNeuron("v_angle");
            //inVelocityLength = inputLayer.CreateNeuron("v_length");
            inNextCheckpointDistance0  = inputLayer.CreateNeuron("c_dist0");
            inNextCheckpointDistance1  = inputLayer.CreateNeuron("c_dist1");
            inNextCheckpointDistance2  = inputLayer.CreateNeuron("c_dist2");
            inNextCheckpointDistance3  = inputLayer.CreateNeuron("c_dist3");
            inNextCheckpointAngle0     = inputLayer.CreateNeuron("c_angle0");
            inNextCheckpointAngle1     = inputLayer.CreateNeuron("c_angle1");
            inNextCheckpointAngle2     = inputLayer.CreateNeuron("c_angle2");
            inNextCheckpointAngle3     = inputLayer.CreateNeuron("c_angle3");
            inNextCheckpointAngle4     = inputLayer.CreateNeuron("c_angle4");
            inNextCheckpointAngle5     = inputLayer.CreateNeuron("c_angle5");
            inNextNextCheckpointAngle0 = inputLayer.CreateNeuron("nnc_angle0");
            inNextNextCheckpointAngle1 = inputLayer.CreateNeuron("nnc_angle1");
            inNextNextCheckpointAngle2 = inputLayer.CreateNeuron("nnc_angle2");
            inNextNextCheckpointAngle3 = inputLayer.CreateNeuron("nnc_angle3");
            inNextNextCheckpointAngle4 = inputLayer.CreateNeuron("nnc_angle4");
            inNextNextCheckpointAngle5 = inputLayer.CreateNeuron("nnc_angle5");
            //inNextCheckpointDistance = inputLayer.CreateNeuron("c_dist");

            OutputLayer outputLayer = nn.CreateOutputLayer();

            outHeading0 = outputLayer.CreateNeuron("o_heading0");
            outHeading1 = outputLayer.CreateNeuron("o_heading1");
            outHeading2 = outputLayer.CreateNeuron("o_heading2");
            outHeading3 = outputLayer.CreateNeuron("o_heading3");
            outHeading4 = outputLayer.CreateNeuron("o_heading4");
            outHeading5 = outputLayer.CreateNeuron("o_heading5");
            outThrust0  = outputLayer.CreateNeuron("o_thrust0");
            outThrust1  = outputLayer.CreateNeuron("o_thrust1");
            outThrust2  = outputLayer.CreateNeuron("o_thrust2");
            outThrust3  = outputLayer.CreateNeuron("o_thrust3");
            outThrust4  = outputLayer.CreateNeuron("o_thrust4");
            outThrust5  = outputLayer.CreateNeuron("o_thrust5");
            outThrust6  = outputLayer.CreateNeuron("o_thrust6");

            for (int i = 0; i < 3; i++)
            {
                HiddenLayer hiddenLayer = nn.CreateHiddenLayer();

                for (int j = 0; j < 32; j++)
                {
                    HiddenNeuron hiddenNeuron = hiddenLayer.CreateNeuron(string.Format("hidden[{0}][{1}]", i, j));
                }
            }

            nn.CreateFullConnections();
            nn.InitWithRandomValues();

            return(nn);
        }
Example #10
        private static ITrainer CreateIrisTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.Iris();

            ITrainer trainer = sigma.CreateTrainer("iris-trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(4)
                                           + FullyConnectedLayer.Construct(12)
                                           + FullyConnectedLayer.Construct(3)
                                           + OutputLayer.Construct(3)
                                           + SquaredDifferenceCostLayer.Construct();
            //trainer.Network = Serialisation.ReadBinaryFileIfExists("iris.sgnet", trainer.Network);

            trainer.TrainingDataIterator = new MinibatchIterator(50, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.06);
            trainer.Operator  = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            //trainer.AddGlobalHook(new StopTrainingHook(atEpoch: 100));
            //trainer.AddLocalHook(new EarlyStopperHook("optimiser.cost_total", 20, target: ExtremaTarget.Min));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), reportEpochIteration: true));
            //.On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));
            //trainer.AddLocalHook(new DiskSaviorHook<INetwork>("network.self", Namers.Dynamic("iris_epoch{0}.sgnet", "epoch"), verbose: true)
            //    .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: 1));

            return(trainer);
        }
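For context, a minimal sketch of how a trainer produced by such a factory is typically run; it uses only calls that appear in the other Sigma samples in this listing, and the environment name is illustrative:

            SigmaEnvironment sigma = SigmaEnvironment.Create("iris"); // illustrative environment name
            ITrainer trainer = CreateIrisTrainer(sigma);              // the factory shown above
            sigma.PrepareAndRun();                                    // prepare the environment and start training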
Example #11
        private static ITrainer CreateParkinsonsTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.Parkinsons();

            ITrainer trainer = sigma.CreateTrainer("parkinsons-trainer");

            trainer.Network = new Network
            {
                Architecture = InputLayer.Construct(22)
                               + FullyConnectedLayer.Construct(140)
                               + FullyConnectedLayer.Construct(20)
                               + FullyConnectedLayer.Construct(1)
                               + OutputLayer.Construct(1)
                               + SquaredDifferenceCostLayer.Construct()
            };

            trainer.TrainingDataIterator = new MinibatchIterator(10, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.01);
            trainer.Operator  = new CpuSinglethreadedOperator(new DebugHandler(new CpuFloat32Handler()));

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new UniClassificationAccuracyReporter("validation", 0.5, TimeStep.Every(1, TimeScale.Epoch)));

            return(trainer);
        }
Example #12
        public FCSuperResolution()
        {
            superres_enc_front = InputLayer.Create(StartSide, 3);
            superres_enc_back  = ActivationLayer.Create <LeakyReLU>();

            superres_enc_front.Append(
                FCLayer.Create(16, 16).Append(
                    ActivationLayer.Create <LeakyReLU>().Append(
                        FCLayer.Create(8, 8).Append(
                            ActivationLayer.Create <LeakyReLU>().Append(
                                FCLayer.Create(4, 4).Append(
                                    superres_enc_back
                                    ))))));

            superres_dec_front = InputLayer.Create(4, 4);
            superres_dec_back  = ActivationLayer.Create <Tanh>();

            superres_dec_front.Append(
                FCLayer.Create(8, 8).Append(
                    ActivationLayer.Create <LeakyReLU>().Append(
                        FCLayer.Create(16, 8).Append(
                            ActivationLayer.Create <LeakyReLU>().Append(
                                FCLayer.Create(32, 3).Append(
                                    superres_dec_back
                                    ))))));

            superres_enc_back.Append(superres_dec_front);

            //Initialize Weights
            superres_enc_front.SetupInternalState();
            superres_enc_front.InitializeWeights(new UniformWeightInitializer(0, 0.001f));
        }
Example #13
        public void InputLayer_Backward()
        {
            var batchSize = 1;
            var width     = 28;
            var height    = 28;
            var depth     = 3;
            var random    = new Random(232);
            var fanIn     = width * height * depth;

            var sut = new InputLayer(height, width, depth);

            sut.Initialize(1, 1, 1, batchSize, Initialization.GlorotUniform, random);

            var input = Matrix <float> .Build.Random(batchSize, fanIn, random.Next());

            sut.Forward(input);

            var delta = Matrix <float> .Build.Random(batchSize, fanIn, random.Next());

            var actual = sut.Backward(delta);

            var expected = delta;

            MatrixAsserts.AreEqual(expected, actual);
        }
Example #14
        private static void SampleXor()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("logical");

            sigma.SetRandomSeed(0);
            sigma.Prepare();

            RawDataset dataset = new RawDataset("xor");

            dataset.AddRecords("inputs", new[] { 0, 0 }, new[] { 0, 1 }, new[] { 1, 0 }, new[] { 1, 1 });
            dataset.AddRecords("targets", new[] { 0 }, new[] { 0 }, new[] { 0 }, new[] { 1 });

            ITrainer trainer = sigma.CreateTrainer("xor-trainer");

            trainer.Network.Architecture = InputLayer.Construct(2) + FullyConnectedLayer.Construct(2) + FullyConnectedLayer.Construct(1) + OutputLayer.Construct(1) + SquaredDifferenceCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(1, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new GradientDescentOptimiser(learningRate: 0.1);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new StopTrainingHook(atEpoch: 10000));
            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch), averageValues: true));
            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Stop), averageValues: true));
            trainer.AddLocalHook(new ValueReporter("network.layers.*<external_output>._outputs.default.activations", TimeStep.Every(1, TimeScale.Stop)));
            trainer.AddLocalHook(new ValueReporter("network.layers.*-fullyconnected.weights", TimeStep.Every(1, TimeScale.Stop)));
            trainer.AddLocalHook(new ValueReporter("network.layers.*-fullyconnected.biases", TimeStep.Every(1, TimeScale.Stop)));

            sigma.Run();
        }
Example #15
        private static void SampleHutter()
        {
            const long timeWindowSize = 10L;

            SigmaEnvironment sigma = SigmaEnvironment.Create("recurrent");

            IDataSource      source    = new MultiSource(new FileSource("enwik8"), new CompressedSource(new MultiSource(new FileSource("enwik8.zip"), new UrlSource("http://mattmahoney.net/dc/enwik8.zip"))));
            IRecordExtractor extractor = new CharacterRecordReader(source, (int)(timeWindowSize + 1), Encoding.ASCII)
                                         .Extractor(new ArrayRecordExtractor <short>(ArrayRecordExtractor <short>
                                                                                     .ParseExtractorParameters("inputs", new[] { 0L }, new[] { timeWindowSize }, "targets", new[] { 0L }, new[] { timeWindowSize }))
                                                    .Offset("targets", 1L))
                                         .Preprocess(new PermutePreprocessor(0, 2, 1))
                                         .Preprocess(new OneHotPreprocessor(0, 255));
            IDataset dataset = new ExtractedDataset("hutter", ExtractedDataset.BlockSizeAuto, false, extractor);

            ITrainer trainer = sigma.CreateTrainer("hutter");

            trainer.Network.Architecture = InputLayer.Construct(256) + RecurrentLayer.Construct(256) + OutputLayer.Construct(256) + SoftMaxCrossEntropyCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(32, dataset);
            trainer.AddNamedDataIterator("validation", new MinibatchIterator(100, dataset));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.07);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Iteration), averageValues: true));
            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(10, TimeScale.Iteration)));

            sigma.PrepareAndRun();
        }
Example #16
        private static void SampleParkinsons()
        {
            SigmaEnvironment sigma = SigmaEnvironment.Create("parkinsons");

            IDataset dataset = Defaults.Datasets.Parkinsons();

            ITrainer trainer = sigma.CreateGhostTrainer("parkinsons-trainer");

            trainer.Network.Architecture = InputLayer.Construct(22)
                                           + FullyConnectedLayer.Construct(140)
                                           + FullyConnectedLayer.Construct(20)
                                           + FullyConnectedLayer.Construct(1)
                                           + OutputLayer.Construct(1)
                                           + SquaredDifferenceCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(10, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.01);

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new UniClassificationAccuracyReporter("validation", 0.5, TimeStep.Every(1, TimeScale.Epoch)));

            sigma.AddTrainer(trainer);

            sigma.PrepareAndRun();
        }
Example #17
        public Network(List <LayerSettings> layersHyperParameters)
        {
            if (layersHyperParameters.Count < 2)
            {
                return;
            }

            var inputLayer = new InputLayer(layersHyperParameters.First(), this);

            Layers = new List <Layer>(layersHyperParameters.Count)
            {
                inputLayer
            };
            foreach (var layerParams in layersHyperParameters.Skip(1).Take(layersHyperParameters.Count - 2))
            {
                var prev = Layers.Last();
                Layers.Add(new HiddenLayer(layerParams, this, prev));
                prev.InitNeuronsWeights();
            }
            var lastHidden = Layers.Last();

            lastHidden.InitNeuronsWeights();
            Layers.Add(new OutputLayer(layersHyperParameters.Last(), this, lastHidden));
            Layers.Last().InitNeuronsWeights();
        }
Example #18
        /// <summary>
        /// Create an MNIST trainer (handwriting recognition) that will be added to an environment.
        /// </summary>
        /// <param name="sigma">The sigma environment this trainer will be assigned to.</param>
        /// <returns>The newly created trainer.</returns>
        private static ITrainer CreateMnistTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.Mnist();

            ITrainer trainer = sigma.CreateTrainer("mnist-trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(28, 28)
                                           + DropoutLayer.Construct(0.2)
                                           + FullyConnectedLayer.Construct(1000, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(800, activation: "rel")
                                           + DropoutLayer.Construct(0.4)
                                           + FullyConnectedLayer.Construct(10, activation: "sigmoid")
                                           + OutputLayer.Construct(10)
                                           + SoftMaxCrossEntropyCostLayer.Construct();
            trainer.TrainingDataIterator = new MinibatchIterator(100, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(Defaults.Datasets.MnistValidation()));
            trainer.Optimiser = new AdagradOptimiser(baseLearningRate: 0.02);
            trainer.Operator  = new CudaSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.1));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.05));

            trainer.AddLocalHook(new ValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Iteration), reportEpochIteration: true)
                                 .On(new ExtremaCriteria("optimiser.cost_total", ExtremaTarget.Min)));

            trainer.AddLocalHook(new RunningTimeReporter(TimeStep.Every(1, TimeScale.Epoch), 4));

            return(trainer);
        }
Example #19
        public void Convert()
        {
            var layers = _graph.GetOperators().Select(ConvertOperator).ToList();

            foreach (var inputPair in _inputs)
            {
                if (_outputs.TryGetValue(inputPair.Value, out var output))
                {
                    inputPair.Key.SetConnection(output);
                }
            }

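            // Connections still unbound after matching become graph inputs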
            var inputs = new List <InputLayer>();

            foreach (var conn in _inputs.Keys.Where(o => o.Connection == null))
            {
                var input = new InputLayer(conn.Dimensions);
                conn.SetConnection(input.Output);
                inputs.Add(input);
            }

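            // Outputs that nothing consumes become graph outputs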
            var outputs = new List <OutputLayer>();

            foreach (var conn in _outputs.Values.Where(o => !o.Connections.Any()))
            {
                var output = new OutputLayer(conn.Dimensions);
                conn.AddConnection(output.Input);
                outputs.Add(output);
            }

            Graph = new Graph(inputs, outputs);
        }
Example #20
        public static ITrainer CreateTicTacToeTrainer(SigmaEnvironment sigma)
        {
            IDataset dataset = Defaults.Datasets.TicTacToe();

            ITrainer trainer = sigma.CreateTrainer("tictactoe-trainer");

            trainer.Network = new Network();
            trainer.Network.Architecture = InputLayer.Construct(9)
                                           + FullyConnectedLayer.Construct(72, "tanh")
                                           + FullyConnectedLayer.Construct(99, "tanh")
                                           + FullyConnectedLayer.Construct(3, "tanh")
                                           + OutputLayer.Construct(3)
                                           + SoftMaxCrossEntropyCostLayer.Construct();

            trainer.TrainingDataIterator = new MinibatchIterator(21, dataset);
            trainer.AddNamedDataIterator("validation", new UndividedIterator(dataset));
            trainer.Optimiser = new MomentumGradientOptimiser(learningRate: 0.01, momentum: 0.9);
            trainer.Operator  = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.*", new GaussianInitialiser(standardDeviation: 0.1));

            trainer.AddLocalHook(new AccumulatedValueReporter("optimiser.cost_total", TimeStep.Every(1, TimeScale.Epoch)));
            trainer.AddHook(new MultiClassificationAccuracyReporter("validation", TimeStep.Every(1, TimeScale.Epoch), tops: new[] { 1, 2 }));

            trainer.AddGlobalHook(new DiskSaviorHook <INetwork>(TimeStep.Every(1, TimeScale.Epoch), "network.self", Namers.Static("tictactoe.sgnet"), verbose: true)
                                  .On(new ExtremaCriteria("shared.classification_accuracy_top1", ExtremaTarget.Max)));

            return(trainer);
        }
Example #21
        /// <summary>
        /// Initializes the layers of the neural network.
        /// </summary>
        /// <param name="listOfPicturesmatrix">List of image matrices.</param>
        /// <param name="layers">List of layers.</param>
        /// <param name="filterCore">Filter kernel.</param>
        /// <param name="inputLayerNeurons">Input layer neurons.</param>
        /// <param name="convolutionalLayerNeurons">Convolutional layer neurons.</param>
        /// <param name="hiddenLayerNeurons">Hidden layer neurons.</param>
        /// <param name="outputNeuron">Output neuron.</param>
        private static void LayersInitialize(List <double[, ]> listOfPicturesmatrix, List <Layer> layers,
                                             double[,] filterCore, out Dictionary <string, double> inputLayerNeurons,
                                             out List <NeuronModel> convolutionalLayerNeurons, out List <NeuronModel> hiddenLayerNeurons,
                                             out NeuronModel outputNeuron)
        {
            var firstDataSet = listOfPicturesmatrix.First();
            var inputLayer   = new InputLayer(firstDataSet);

            inputLayer.Initialize();
            layers.Add(inputLayer);

            inputLayerNeurons = inputLayer.GetLayerNeurons();
            var convolutionalLayer = new ConvolutionalLayer(inputLayerNeurons);

            convolutionalLayer.Initialize(filterCore);
            layers.Add(convolutionalLayer);

            convolutionalLayerNeurons = convolutionalLayer.GetLayerNeurons();
            var hiddenLayer = new HiddenLayer(convolutionalLayerNeurons);

            hiddenLayer.Initialize();
            layers.Add(hiddenLayer);

            hiddenLayerNeurons = hiddenLayer.GetLayerNeurons();
            var outputLayer = new OutputLayer(hiddenLayerNeurons);

            outputLayer.Initilize();
            layers.Add(outputLayer);

            outputNeuron = outputLayer.GetOutputNeuron();
        }
Example #22
        public MainGameWindow()
            : base(800, 600, GraphicsMode.Default, "Sea battles")
        {
            Resize += delegate(object sender, EventArgs e)
            {
                // Note that we cannot call any OpenGL methods directly. What we can do is set
                // a flag and respond to it from the rendering thread.
                lock (update_lock)
                {
                    viewport_changed = true;
                    viewport_width = this.ClientRectangle.Width;
                    viewport_height = this.ClientRectangle.Height;
                }
            };

            handlers.Add(typeof(TraceText), WriteTitle);
            handlers.Add(typeof(ButtonUp), HandleButtonUp);
            mainCamera = new Camera(0, 0, 1, 800, 600);
            input = new InputLayer(this);

            ship = Ship.Create(new PointF(0, 0), 40, 10);
            ship2 = Ship.Create(new PointF(0, 0), 40, 1);

            ship.Name = "player";
            ship.Graphics.Name = "player graphics";
            ship2.Graphics.Name = "enemy graphics";
            //box = new TestBoundingObject(BoundShape.Ship, new PointF(0, 0), 10, 20, Color.Green, Color.Red, 0);
            //box2 = new TestBoundingObject(BoundShape.Circle, new PointF(0, 0), 20, 40, Color.White, Color.Black, -0.5f);

            //MessageDispatcher.RegisterHandler(typeof(ButtonDown), box);
            //MessageDispatcher.RegisterHandler(typeof(SetPosition), box);
            //MessageDispatcher.RegisterHandler(typeof(SetSpeed), box);
            //MessageDispatcher.RegisterHandler(typeof(GetOwnerPosition), box);
            //MessageDispatcher.RegisterHandler(typeof(InformPosition), box);
            //MessageDispatcher.RegisterHandler(typeof(BoundSetCollision), box);
            //MessageDispatcher.RegisterHandler(typeof(BoundSetNotCollision), box);

            //---------------------------------------------------
            //graphicsAspects.Add(ship.Graphics);

            // only the player's ship subscribes to user input
            MessageDispatcher.RegisterHandler(typeof(ButtonDown), ship);
            MessageDispatcher.RegisterHandler(typeof(ButtonHold), ship);

            //MessageDispatcher.RegisterHandler(typeof(SetPosition), ship);
            //MessageDispatcher.RegisterHandler(typeof(SetSpeed), ship);
            //// needed to determine the ship's position and speed at the moment of firing
            //// in this case the owner is ship
            //MessageDispatcher.RegisterHandler(typeof(GetOwnerPosition), ship);
            //MessageDispatcher.RegisterHandler(typeof(InformPosition), ship);
            //MessageDispatcher.RegisterHandler(typeof(Shoot), ship);
            //MessageDispatcher.RegisterHandler(typeof(BoundSetCollision), ship);
            //MessageDispatcher.RegisterHandler(typeof(BoundSetNotCollision), ship);
            ////MessageDispatcher.RegisterHandler(typeof(SetPosition), anotherShip);

            MessageDispatcher.RegisterHandler(typeof(TraceText), this);
            MessageDispatcher.RegisterHandler(typeof(ButtonUp), this);

            //timer = new System.Threading.Timer(new TimerCallback(timer_Tick), null, 1000, 1000);
        }
Example #23
	/// <summary>
	/// Switches to the next window
	/// </summary>
	public void OnKeyCodeDown(int key, float strength, InputLayer layer)
	{
		if (key == (int)KeyCode.Tab)
		{
			target.Select();
		}
	}
Example #24
        public void DeuxNeuronesPropagate()
        {
            var inputs =
                new InputLayer(
                    new InputNeurone(1, 1),
                    new InputNeurone(2, 2)
                    );

            var resultLayer =
                new DeepLayer(
                    inputs,
                    new DeepNeurone(
                        1,
                        new SigmoidFnc(),
                        new Synapse(1, 1, 0.5),
                        new Synapse(2, 1, 0.5)
                        ),
                    new DeepNeurone(
                        2,
                        new SigmoidFnc(),
                        new Synapse(1, 2, 0.5),
                        new Synapse(2, 2, 0.5)
                        )
                    );

            Assert.AreEqual(
                resultLayer.propagate().neuroneValue(1).value(),
                new Sigmoid(1 * 0.5 + 2 * 0.5).value()
                );
            Assert.AreEqual(
                resultLayer.propagate().neuroneValue(2).value(),
                new Sigmoid(1 * 0.5 + 2 * 0.5).value()
                );
        }
Example #25
        /// <summary>
        /// Instantiate a Keras tensor.
        /// </summary>
        /// <param name="shape"></param>
        /// <param name="batch_size"></param>
        /// <param name="dtype"></param>
        /// <param name="name"></param>
        /// <param name="sparse">
        /// A boolean specifying whether the placeholder to be created is sparse.
        /// </param>
        /// <param name="ragged">
        /// A boolean specifying whether the placeholder to be created is ragged.
        /// </param>
        /// <param name="tensor">
        /// Optional existing tensor to wrap into the `Input` layer.
        /// If set, the layer will not create a placeholder tensor.
        /// </param>
        /// <returns></returns>
        public Tensor Input(TensorShape shape             = null,
                            int batch_size                = -1,
                            TensorShape batch_input_shape = null,
                            TF_DataType dtype             = TF_DataType.DtInvalid,
                            string name   = null,
                            bool sparse   = false,
                            bool ragged   = false,
                            Tensor tensor = null)
        {
            if (batch_input_shape != null)
            {
                shape = batch_input_shape.dims.Skip(1).ToArray();
            }

            var args = new InputLayerArgs
            {
                Name            = name,
                InputShape      = shape,
                BatchInputShape = batch_input_shape,
                BatchSize       = batch_size,
                DType           = dtype,
                Sparse          = sparse,
                Ragged          = ragged,
                InputTensor     = tensor
            };

            var layer = new InputLayer(args);

            return(layer.InboundNodes[0].Outputs);
        }
Example #26
    public NeuralNet(NeuronMode mode, bool evolveActivationThreshold, int inputCount, int outputCount, int middleCount, int hiddenLayerCount)
    {
        this.mode   = mode;
        inputLayer  = new InputLayer(inputCount);
        outputLayer = new NeuronLayer(mode, outputCount, defaultActivationThreshold, evolveActivationThreshold);
        NeuronLayer prevLayer = null;

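        // Create the hidden layers and chain each one to the previous layer's output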
        for (int i = 0; i < hiddenLayerCount; ++i)
        {
            NeuronLayer newLayer = new NeuronLayer(mode, middleCount, defaultActivationThreshold, evolveActivationThreshold);
            hiddenLayers.Add(newLayer);
            if (prevLayer != null)
            {
                prevLayer.setOutputLayer(newLayer);
            }
            prevLayer = newLayer;
        }
        if (prevLayer != null)
        {
            prevLayer.setOutputLayer(outputLayer);
            inputLayer.setOutputLayer(hiddenLayers[0]);
        }
        else
        {
            inputLayer.setOutputLayer(outputLayer);
        }
    }
Example #27
        public static BPNetwork Create(int inputLayerNeuronCount, int[] hiddenLayerNeuronCount, int outputLayerNeuronCount, ActivationFunction hiddenLayerActivationFunction = ActivationFunction.SIGMOID, ActivationFunction outputLayerActivationFunction = ActivationFunction.SIGMOID)
        {
            BPNetwork network = new BPNetwork();

            var inputLayer = new InputLayer();

            inputLayer.InitNeurons(inputLayerNeuronCount, false, ActivationFunction.LINEAR);
            network.InputLayer = inputLayer;

            network.HiddenLayers = new List <HiddenLayer>();
            if (hiddenLayerNeuronCount != null)
            {
                hiddenLayerNeuronCount.ToList().ForEach(count => {
                    network.HiddenLayers.Add(new HiddenLayer());
                });

                for (var i = 0; i < hiddenLayerNeuronCount.Length; i++)
                {
                    network.HiddenLayers[i].InitNeurons(hiddenLayerNeuronCount[i], true, hiddenLayerActivationFunction);
                }
            }

            var outputLayer = new OutputLayer();

            outputLayer.InitNeurons(outputLayerNeuronCount, true, outputLayerActivationFunction);
            network.OutputLayer = outputLayer;

            network.ConstructConnectionsAndRandonizeWeights();

            return(network);
        }
Example #28
 //Sends the inputs once through the network
 public void Train(params double[] inputs)
 {
     int i = 0;
     InputLayer.ForEach(a => a.Value = inputs[i++]); //Assign input data to input-neurons
     HiddenLayer.ForEach(a => a.Calc_Value());       //Hidden Calc
     OutputLayer.ForEach(a => a.Calc_Value());       //Outuput Calc 
 }
Example #29
        public void Export(string path, string fileName)
        {
            var fullPath = Path.Combine(path, fileName);

            if (File.Exists(fullPath))
            {
                File.Delete(fullPath); // fullPath is already the combined path
            }

            var            jsonObject = new List <string>();
            JsonSerializer serializer = new JsonSerializer();

            for (int i = 0; i < Weights.Length; i++)
            {
                jsonObject.Add(JsonConvert.SerializeObject(this.Weights[i].Data));
                jsonObject.Add(JsonConvert.SerializeObject(this.Weights[i].Rows));
                jsonObject.Add(JsonConvert.SerializeObject(this.Weights[i].Columns));
            }
            jsonObject.Add("#");
            for (int i = 0; i < Bias.Length; i++)
            {
                jsonObject.Add(JsonConvert.SerializeObject(this.Bias[i].Data));
                jsonObject.Add(JsonConvert.SerializeObject(this.Bias[i].Rows));
                jsonObject.Add(JsonConvert.SerializeObject(this.Bias[i].Columns));
            }
            jsonObject.Add("#");
            jsonObject.Add(JsonConvert.SerializeObject(this.HiddenLayer));
            jsonObject.Add((ActivationFuntion.Method.Name));
            jsonObject.Add(InputLayer.ToString());
            jsonObject.Add(OutputLayer.ToString());
            File.WriteAllLines(fullPath, jsonObject);
        }
Example #30
        private static NeuralNetwork InitializeNeuralNetwork(int seed)
        {
            Random random = new Random(seed == 0 ? new Random().Next() : seed);

            float RandomWeight() => (float)(random.NextDouble() * 2 - 1);

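            // Layers: 3x5 input -> two ReLU convolutional layers -> 16-unit sigmoid layer -> 10-unit softmax output; weights drawn uniformly from [-1, 1)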
            Layer prevLayer;

            InputLayer li = new InputLayer(3, 5);

            prevLayer = li;

            ConvolutionalLayer l0 = new ConvolutionalLayer(8, 2, 1, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l0;
            prevLayer.InitializeWeights(RandomWeight);

            ConvolutionalLayer l2 = new ConvolutionalLayer(16, 2, 1, 0, prevLayer, ActivationFunctions.ReLU(true));

            prevLayer = l2;
            prevLayer.InitializeWeights(RandomWeight);

            FullyConnectedLayer l7 = new FullyConnectedLayer(16, prevLayer, ActivationFunctions.Sigmoid(1));

            prevLayer = l7;
            prevLayer.InitializeWeights(RandomWeight);

            FullyConnectedLayer l8 = new FullyConnectedLayer(10, prevLayer, ActivationFunctions.SoftMax(1));

            prevLayer = l8;
            prevLayer.InitializeWeights(RandomWeight);

            return(new NeuralNetwork(li, l0, l2, l7, l8));
        }
Example #31
        public TensorModel ReadModel(List <string> _RawData)
        {
            TensorModel Model = new TensorModel();

            try
            {
                // Read the first line, create the InputLayer and set the model name
                var _stringtemp = _RawData[0].Split(new char[] { '(', ')' });
                Model.ModelName = _stringtemp[0].Replace("def", string.Empty);
                InputLayer _inputlayer;
                Model.Layers.Add(_inputlayer = new InputLayer(_stringtemp[1]));

                // Read the remaining lines, which list the model's layers in order
                for (int i = 1; i < _RawData.Count; i++)
                {
                    Model.Layers.Add(LayerReader(_RawData[i]));
                }
            }
            catch
            {
            }
            // Return the TensorFlow model
            return(Model);
        }
Example #32
        private void Create(InputLayer inputLayer, INodeLayer[] layers)
        {
            NodeLayers       = new INodeLayer[layers.Length + 1];
            ConnectionLayers = new List <IConnectionLayer>();
            var nodes = new List <List <INode> >();

            NumberOfInputs  = inputLayer.Input.Length;
            NumberOfOutputs = layers.Last().Output.Length;
            NodeLayers[0]   = inputLayer;
            nodes.Add(inputLayer.Nodes);

            for (int i = 0; i < layers.Length; i++)
            {
                var ih = layers[i];
                NodeLayers[i + 1] = ih;
                nodes.Add(ih.Nodes);
            }

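            // Connect each pair of adjacent node layers with a linear weight layer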
            for (var i = 0; i < NodeLayers.Length - 1; i++)
            {
                var cl = new LinearWeightLayer(NodeLayers[i], NodeLayers[i + 1]);
                ConnectionLayers.Add(cl);
            }

            StagedNodes = nodes.Select(n => n.ToArray()).ToArray();
            Nodes       = nodes.SelectMany(s => s).ToList();
        }
Example #33
        /// <summary>
        /// Create an MNIST trainer (handwriting recognition) that will be added to an environment.
        /// </summary>
        /// <param name="sigma">The sigma environment this trainer will be assigned to.</param>
        /// <returns>The newly created trainer.</returns>
        private static ITrainer CreateMnistTrainer(SigmaEnvironment sigma)
        {
            ByteRecordReader mnistImageReader    = new ByteRecordReader(headerLengthBytes: 16, recordSizeBytes: 28 * 28, source: new CompressedSource(new MultiSource(new FileSource("train-images-idx3-ubyte.gz"), new UrlSource("http://yann.lecun.com/exdb/mnist/train-images-idx3-ubyte.gz"))));
            IRecordExtractor mnistImageExtractor = mnistImageReader.Extractor("inputs", new[] { 0L, 0L }, new[] { 28L, 28L }).Preprocess(new NormalisingPreprocessor(0, 255));

            ByteRecordReader mnistTargetReader    = new ByteRecordReader(headerLengthBytes: 8, recordSizeBytes: 1, source: new CompressedSource(new MultiSource(new FileSource("train-labels-idx1-ubyte.gz"), new UrlSource("http://yann.lecun.com/exdb/mnist/train-labels-idx1-ubyte.gz"))));
            IRecordExtractor mnistTargetExtractor = mnistTargetReader.Extractor("targets", new[] { 0L }, new[] { 1L }).Preprocess(new OneHotPreprocessor(minValue: 0, maxValue: 9));

            IDataset dataset = new Dataset("mnist-training", Dataset.BlockSizeAuto, mnistImageExtractor, mnistTargetExtractor);
            ITrainer trainer = sigma.CreateTrainer("test");

            trainer.Network = new Network
            {
                Architecture = InputLayer.Construct(28, 28)
                               + 2 * FullyConnectedLayer.Construct(28 * 28)
                               + FullyConnectedLayer.Construct(10)
                               + OutputLayer.Construct(10)
                               + SoftMaxCrossEntropyCostLayer.Construct()
            };

            trainer.TrainingDataIterator = new MinibatchIterator(8, dataset);
            trainer.Optimiser            = new AdagradOptimiser(baseLearningRate: 0.02);
            trainer.Operator             = new CpuSinglethreadedOperator();

            trainer.AddInitialiser("*.weights", new GaussianInitialiser(standardDeviation: 0.05f));
            trainer.AddInitialiser("*.bias*", new GaussianInitialiser(standardDeviation: 0.01f, mean: 0.03f));

            trainer.AddGlobalHook(new CurrentEpochIterationReporter(TimeStep.Every(1, TimeScale.Iteration)));

            return(trainer);
        }
Example #34
        /// <summary>
        /// Initializes a new instance of the <see cref="FirstPersonControl"/> class.
        /// </summary>
        /// <param name="camera">The camera to use</param>
        /// <param name="inputLayer">The input layer that the controls will add input triggers to</param>
        public FirstPersonControl(Camera camera, InputLayer inputLayer)
        {
            _cam = camera;

            inputLayer.RegisterTrigger(new InputTrigger(new KeyPressedCondition(Keys.A, true),
                                                        new InputAction(delegate(GameTime time) {
                Move(-_cam.Right, time);
            })));
            inputLayer.RegisterTrigger(new InputTrigger(new KeyPressedCondition(Keys.D, true),
                                                        new InputAction(delegate(GameTime time) {
                Move(_cam.Right, time);
            })));
            inputLayer.RegisterTrigger(new InputTrigger(new KeyPressedCondition(Keys.W, true),
                                                        new InputAction(delegate(GameTime time) {
                Move(_cam.Direction, time);
            })));
            inputLayer.RegisterTrigger(new InputTrigger(new KeyPressedCondition(Keys.S, true),
                                                        new InputAction(delegate(GameTime time) {
                Move(-_cam.Direction, time);
            })));
            inputLayer.RegisterTrigger(new InputTrigger(new PassThroughCondition(),
                                                        new InputAction(delegate(GameTime time) {
                RotateCamera(time);
            })));
        }
Example #35
        public void SerializationTest()
        {
            // Create a InputLayer
            var layer = new InputLayer(5, 5, 3);

            layer.Init(10, 10, 3);

            InputLayer deserialized;

            using (var ms = new MemoryStream())
            {
                // Serialize
                IFormatter formatter = new BinaryFormatter();
                formatter.Serialize(ms, layer);

                // Deserialize
                ms.Position  = 0;
                deserialized = formatter.Deserialize(ms) as InputLayer;
            }

            Assert.AreEqual(layer.InputDepth, deserialized.InputDepth);
            Assert.AreEqual(layer.InputHeight, deserialized.InputHeight);
            Assert.AreEqual(layer.InputWidth, deserialized.InputWidth);
            Assert.AreEqual(layer.OutputDepth, deserialized.OutputDepth);
            Assert.AreEqual(layer.OutputHeight, deserialized.OutputHeight);
            Assert.AreEqual(layer.OutputWidth, deserialized.OutputWidth);
        }
Example #36
	/// <summary>
	/// Used for scrolling through the keys
	/// </summary>
	public void OnKeyCode(int axis, float strength, InputLayer layer)
	{
		if (axis == (int)AxisKey.MouseWheelp && scrollbar.value < 1 && layer == inputLayer)
		{
			scrollbar.value += 0.1f * scrollFactor * strength;
		}
		else if (axis == (int)AxisKey.MouseWheelm && scrollbar.value > 0 && layer == inputLayer)
		{
			scrollbar.value -= 0.1f * scrollFactor * strength;
		}
	}
Example #37
	/// <summary>
	/// Opens and closes the pausemenu
	/// </summary>
	/// <param name="key">the key that was pressed</param>
	/// <param name="strength">if the key is an AxisKey, the value is the analog strength</param>
	/// <param name="layer">the actual InputLayer</param>
	public void OnKeyDown(Key key, float strength, InputLayer layer)
	{
		if (!UIController.Instance.IsUIWindowOpen(UIWindow.PauseMenu) && layer == InputLayer.Default && key == Key.Menu)
		{
			UIController.Instance.OpenUIWindow(UIWindow.PauseMenu);
		}
		else if (UIController.Instance.IsUIWindowOpen(UIWindow.PauseMenu) && layer == InputLayer.UI && key == Key.Menu)
		{
			UIController.Instance.CloseUIWindow(UIWindow.PauseMenu);
		}
	}
Example #38
        public InputLayer Build(int inputSize)
        {
            IList<InputNeuron> neurons = new List<InputNeuron>(inputSize);
            for (int i = 0; i < inputSize; ++i)
            {
                neurons.Add(inputNeuronFactory.Create());
            }

            InputLayer inputLayer = new InputLayer(neurons);

            return inputLayer;
        }
Example #39
        public Network(Activation activation, TrainingInfo trainInfo, int inputSize, int[] hiddenSizes, int outputSize)
        {
            this.InputSize = inputSize;
            this.HiddenSizes = hiddenSizes;
            this.OutputSize = outputSize;

            this.activation = activation;
            this.inputLayer = new InputLayer(inputSize);
            this.hiddenLayers = hiddenSizes
                .Select(size => new HiddenLayer(activation, trainInfo, size))
                .ToArray();
            this.outputLayer = new OutputLayer(activation, trainInfo, outputSize);
            ConnectLayers();
        }
Example #40
	/// <summary>
	/// Sends and Returns the pokemon
	/// </summary>
	/// <param name="key">the key</param>
	/// <param name="strength">the strength</param>
	/// <param name="layer">the actual layer</param>
	public void OnKeyDown(Key key, float strength, InputLayer layer)
	{
		if (!isOut && key == Key.SendPokemon && layer == InputLayer.Default)
		{
			isOut = true;
			Character.Main.ActivePokemon = new Pokemon();
			HUDController.Instance.HUD = HUD.PokemonHUD;
		}
		else if (isOut && key == Key.ReturnPokemon && layer == InputLayer.Default)
		{
			isOut = false;
			HUDController.Instance.HUD = HUD.PlayerHUD;
			Character.Main.ActivePokemon = null;
		}
	}
Example #41
        /// <summary>
        /// Changes the import-to layer name to IMP_importFileName.
        /// If importFileName is longer than LayerNameLength, it is truncated to LayerNameLength.
        /// If importFileName contains characters other than letters or digits, those characters are replaced with '_'.
        /// </summary>
        /// <returns>Returns true if it succeeds.</returns>
        public bool ChangeLayerName(InputLayer layer, ref string layerName, bool isToLayer0)
        {
            LayerNameType layerNameType = LayerNameType.LayerNameDirect;

            try
            {
                if (isToLayer0)
                {
                    layerName = "0";
                }
                else
                {
                    string existingName = null;
                    // Truncate the layer name if it's too long
                    existingName = layer.Name;
                    // Replace illegal characters
                    foreach (Char tmp in existingName)
                    {
                        if (!(tmp >= 'A' && tmp <= 'Z'))
                        {
                            if (!(tmp >= 'a' && tmp <= 'z'))
                            {
                                if (!(tmp >='0' && tmp <='9'))
                                {
                                    existingName = existingName.Replace(tmp, '_'); // Replace returns a new string; assign it back
                                }
                            }
                        }
                    }

                    // Prefix "IMP_"
                    layerName = string.Concat("IMP_", existingName);
                }

                layer.SetLayerName(layerNameType, layerName);

                return true;
            }
            catch
            {
                return false;
            }
        }
Example #42
	/// <summary>
	/// Deselects the currently selected button
	/// </summary>
	/// <param name="axis">the axis</param>
	/// <param name="strength">the strength</param>
	/// <param name="layer">the layer</param>
	public void OnKeyCode(int axis, float strength, InputLayer layer)
	{
		if (UI.Instance.EventSystem != null && UI.Instance.EventSystem.currentSelectedGameObject != null && (axis == (int)AxisKey.MouseXm || axis == (int)AxisKey.MouseXp || axis == (int)AxisKey.MouseYm || axis == (int)AxisKey.MouseYp) && layer == InputLayer.UI)
		{
			UI.Instance.EventSystem.SetSelectedGameObject(null);
		}
	}
Example #43
 public OrientingSubsystem(InputLayer inputLayer, OutputLayer outputLayer)
 {
     this.dataFromInputLayer = inputLayer;
     this.outputLayer = outputLayer;
 }
Example #44
	public void OnKeyCodeDown(int key, float strength, InputLayer layer)
	{
	}
Example #45
        /// <summary>
        /// This function set the OD table name to layerName_OD.
        /// </summary>
        public bool SetTableName(InputLayer layer, string layerName)
        {
            ImportDataMapping tableType;
            tableType = ImportDataMapping.NewObjectDataOnly;
            string tableName = string.Concat(layerName, "_OD");
            string newTableName = null;
            // If the table name already exists, append a number after the table name,
            // until no OD table with the same name found.
            if (TableNameExist(tableName))
            {
                int index = 1;
                do
                {
                    newTableName = string.Concat(tableName, index.ToString());
                    index++;
                }
                while (TableNameExist(newTableName));

                layer.SetDataMapping(tableType, newTableName);
            }
            else
            {
                layer.SetDataMapping(tableType, tableName);
            }

            return true;
        }
Example #46
	public void OnKeyCode(int key, float strength, InputLayer layer)
	{
		// Position
		if (Highlighted && key == (int)AxisKey.RSYp && layer == InputLayer.UI)
		{
			if (RectTransform.anchorMin.y > 0 && Time.deltaTime * movementFactor * strength <= RectTransform.anchorMin.y)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x, RectTransform.anchorMin.y - (Time.deltaTime * movementFactor * strength));
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x, RectTransform.anchorMax.y - (Time.deltaTime * movementFactor * strength));
			}
			else if (RectTransform.anchorMin.y > 0)
			{
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x, RectTransform.anchorMax.y - RectTransform.anchorMin.y);
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x, 0);
			}
		}
		else if (Highlighted && key == (int)AxisKey.RSYm && layer == InputLayer.UI)
		{
			if (RectTransform.anchorMax.y < 1 && Time.deltaTime * movementFactor * strength <= 1 - RectTransform.anchorMax.y)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x, RectTransform.anchorMin.y + (Time.deltaTime * movementFactor * strength));
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x, RectTransform.anchorMax.y + (Time.deltaTime * movementFactor * strength));
			}
			else if (RectTransform.anchorMax.y < 1)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x, RectTransform.anchorMin.y + 1 - RectTransform.anchorMax.y);
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x, 1);
			}
		}
		if (Highlighted && key == (int)AxisKey.RSXp && layer == InputLayer.UI)
		{
			if (RectTransform.anchorMax.x < 1 && Time.deltaTime * movementFactor * strength <= 1 - RectTransform.anchorMax.x)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x + (Time.deltaTime * movementFactor * strength), RectTransform.anchorMin.y);
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x + (Time.deltaTime * movementFactor * strength), RectTransform.anchorMax.y);
			}
			else if (RectTransform.anchorMax.x < 1)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x + 1 - RectTransform.anchorMax.x, RectTransform.anchorMin.y);
				RectTransform.anchorMax = new Vector2(1, RectTransform.anchorMax.y);
			}
		}
		else if (Highlighted && key == (int)AxisKey.RSXm && layer == InputLayer.UI)
		{
			if (RectTransform.anchorMin.x > 0 && Time.deltaTime * movementFactor * strength <= RectTransform.anchorMin.x)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x - (Time.deltaTime * movementFactor * strength), RectTransform.anchorMin.y);
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x - (Time.deltaTime * movementFactor * strength), RectTransform.anchorMax.y);
			}
			else if (RectTransform.anchorMin.x > 0)
			{
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x - RectTransform.anchorMin.x, RectTransform.anchorMax.y);
				RectTransform.anchorMin = new Vector2(0, RectTransform.anchorMin.y);
			}
		}

		// Scale
		if (Highlighted && (key == (int)KeyCode.W || key == (int)AxisKey.HatYp) && layer == InputLayer.UI)
		{
			if (RectTransform.anchorMin.y > 0 && RectTransform.anchorMax.y < 1 && Time.deltaTime * scaleFactor * strength <= RectTransform.anchorMin.y && Time.deltaTime * scaleFactor * strength <= 1 - RectTransform.anchorMax.y)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x, RectTransform.anchorMin.y - (Time.deltaTime * scaleFactor * strength));
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x, RectTransform.anchorMax.y + (Time.deltaTime * scaleFactor * strength));
			}
			else if (RectTransform.anchorMin.y > 0 && Time.deltaTime * scaleFactor * strength <= RectTransform.anchorMin.y)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x, RectTransform.anchorMin.y - (Time.deltaTime * scaleFactor * strength));
			}
			else if (RectTransform.anchorMax.y < 1 && Time.deltaTime * scaleFactor * strength <= 1 - RectTransform.anchorMax.y)
			{
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x, RectTransform.anchorMax.y + (Time.deltaTime * scaleFactor * strength));
			}
			else
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x, 0);
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x, 1);
			}
		}
		else if (Highlighted && (key == (int)KeyCode.S || key == (int)AxisKey.HatYm) && layer == InputLayer.UI)
		{
			if (RectTransform.anchorMin.y + 0.01f < RectTransform.anchorMax.y && Time.deltaTime * scaleFactor * 2 * strength <= RectTransform.anchorMax.y - RectTransform.anchorMin.y)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x, RectTransform.anchorMin.y + (Time.deltaTime * scaleFactor * strength));
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x, RectTransform.anchorMax.y - (Time.deltaTime * scaleFactor * strength));
			}
			else
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x, RectTransform.anchorMin.y);
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x, RectTransform.anchorMin.y + 0.01f);
			}
		}
		if (Highlighted && (key == (int)KeyCode.A || key == (int)AxisKey.HatXm) && layer == InputLayer.UI)
		{
			if (RectTransform.anchorMin.x + 0.01f < RectTransform.anchorMax.x && Time.deltaTime * scaleFactor * 2 * strength <= RectTransform.anchorMax.x - RectTransform.anchorMin.x)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x + (Time.deltaTime * scaleFactor * strength), RectTransform.anchorMin.y);
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x - (Time.deltaTime * scaleFactor * strength), RectTransform.anchorMax.y);
			}
			else
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x, RectTransform.anchorMin.y);
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMin.x + 0.01f, RectTransform.anchorMax.y);
			}
		}
		else if (Highlighted && (key == (int)KeyCode.D || key == (int)AxisKey.HatXp) && layer == InputLayer.UI)
		{
			if (RectTransform.anchorMin.x > 0 && RectTransform.anchorMax.x < 1 && Time.deltaTime * scaleFactor * strength <= RectTransform.anchorMin.x && Time.deltaTime * scaleFactor <= 1 - RectTransform.anchorMax.x)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x - (Time.deltaTime * scaleFactor * strength), RectTransform.anchorMin.y);
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x + (Time.deltaTime * scaleFactor * strength), RectTransform.anchorMax.y);
			}
			else if (RectTransform.anchorMin.x > 0 && Time.deltaTime * scaleFactor * strength <= RectTransform.anchorMin.x)
			{
				RectTransform.anchorMin = new Vector2(RectTransform.anchorMin.x - (Time.deltaTime * scaleFactor * strength), RectTransform.anchorMin.y);
			}
			else if (RectTransform.anchorMax.x < 1 && Time.deltaTime * scaleFactor * strength <= 1 - RectTransform.anchorMax.x)
			{
				RectTransform.anchorMax = new Vector2(RectTransform.anchorMax.x + (Time.deltaTime * scaleFactor * strength), RectTransform.anchorMax.y);
			}
			else
			{
				RectTransform.anchorMin = new Vector2(0, RectTransform.anchorMin.y);
				RectTransform.anchorMax = new Vector2(1, RectTransform.anchorMax.y);
			}
		}
	}
Example #47
 public AttentionalSubsystem(OrientingSubsystem orientingSubsystem, InputLayer inputLayer, OutputLayer outputLayer)
 {
     this.orientingSubsystem = orientingSubsystem;
     this.inputLayer = inputLayer;
     this.outputLayer = outputLayer;
 }