Example #1
 public Connection(UInt64 _conn_id, INet _net, Socket _socket, ISession _sess)
 {
     conn_id = _conn_id;
     net     = _net;
     socket  = _socket;
     session = _sess;
 }
Example #2
 public InfinityScroll(IBlue blue, IRed red, INet net, IShowData showData)
 {
     _blue     = blue;
     _red      = red;
     _net      = net;
     _showData = showData;
 }
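Example #3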
        private static void AddNetInfos(IStep step, IODBLayer parentLayer, String NetName, IODBObject netItem)
        {
            if (NetName.Length == 0) //the default net
            {
                NetName = "$NONE$";
            }

            int  lastNetNr = step.GetNets().Count;
            INet outNet    = step.GetNet(NetName);
            int  netNr     = -1;

            if (outNet == null)
            {
                netNr = step.AddNet(NetName, "", out outNet);
            }
            else
            {
                netNr = outNet.GetNetNumber();
            }

            netItem.PcbNetNumber = outNet.GetNetNumber();                          //set the net information on the object
            int newNr = outNet.AddLayerRef(parentLayer.GetLayerName(), lastNetNr); //each net saves a list of all layers that are used.

            if (newNr == lastNetNr)
            {
                lastNetNr++;
            }
            parentLayer.SetNetNumber(NetName, netItem.PcbNetNumber);                                       //for each object, the layer needs the information that this net is used.
            outNet.AddFID(PCBI.FidType.Copper, parentLayer.GetLayerName(), netItem.GetIndexOnLayer(), ""); //this is specific to ODB++; if you don't use it and save the data as ODB++, some information gets lost.
        }
Example #4
        protected static double GetPatternError(INet net, ILearningPattern pattern)
        {
            var actual = net.Propagate(pattern.Input);
            var expected = pattern.Output;

            var errors = new List<IFuzzyNumber>();
            var i = 0;
            foreach (var actualNumber in actual)
            {
                errors.Add(actualNumber.Sub(expected.ElementAt(i)));
                i++;
            }

            var patternError = 0.0;
            foreach (var errorNumber in errors)
            {
                var leftError = 0.0;
                var rightError = 0.0;
                errorNumber.ForeachLevel((alpha, level) =>
                {
                    leftError += alpha * (level.X * level.X);
                    rightError += alpha * (level.Y * level.Y);
                });

                var currentOutputError = leftError + rightError;
                patternError += currentOutputError;
            }

            return patternError / 2.0;
        }
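Reading the loops above: for every output k and every alpha-level of its error, the code adds alpha * (level.X * level.X + level.Y * level.Y), where level.X and level.Y are treated as the left and right endpoints of the alpha-cut, and finally halves the total, a fuzzy-number analogue of the usual half-sum-of-squared-errors pattern error.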
Example #5
 public static INetData Data(this INet net)
 {
     return(new NetData(
                net.Id,
                net.AllNeurons.Select(n => n.Data()).ToList(), net.AllConnections.Select(c => c.Data()).ToList(),
                net.InputNeurons.Select(n => n.Id).ToList(), net.OutputNeurons.Select(n => n.Id).ToList()));
 }
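Example #6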
        //here _gradient holds the sum of the per-pattern gradients; we should make a step in this direction
        protected override bool LearnBatch(INet net, double currentLearningCycleError)
        {
            if (_prevStep != null)
            {
                var y = _gradient.Negate().Sum(_prevGradient); //yk
                //_gradientDiffNorm = y.Norm;
                //Console.WriteLine("Gradient diff norm: {0}", _gradientDiffNorm.GetMod().X);
                //Console.WriteLine();
                //if (IsNetLearned(currentLearningCycleError))
                //    return;
                //it's time to calculate b(k + 1)
                _b = CalculateInvertedPseudoGaussian(_b, _prevStep, y);
            }

            var direction = CalculateMinimizeDirection(_b, _gradient); //pk - direction of next step
            var step = MakeStep(direction, net, currentLearningCycleError); //step = alpha*pk
            //var step = MakeStepGoldstein(direction, net, currentLearningCycleError, _gradient); //step = alpha*pk
            if (step == null)
            {
                // line search failed (MakeStep returned null); _prevStep becomes null below, so the next batch skips the B(k+1) update
            }

            //Save step and grad
            _prevStep = step;
            _prevGradient = _gradient;
            //clear gradient vector
            _gradient = null;

            return true;
        }
Example #7
        public bool OnAccept(INet net)
        {
            if (net == null)
                return false;

            return true;
        }
Example #8
 public static void SaveNet(this INet net, string filename)
 {
     using (var fs = new FileStream(filename, FileMode.Create))
     {
         net.SaveBinary(fs);
     }
 }
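A rough usage sketch (not part of the original sources): this extension pairs naturally with the LoadNet helper shown in Example #22. The wrapper class and file name below are placeholders.

 public static class NetRoundTripSketch
 {
     public static INet SaveAndReload(INet net)
     {
         net.SaveNet("net.dat");                    // Example #8: serialize to disk
         return NetExtension.LoadNet("net.dat");    // Example #22: returns null if the file is missing
     }
 }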
Example #9
        public void Execute(IPCBIWindow parent)
        {
            wdlg = new PCB_Investigator.PCBIWindows.PCBIWorkingDialog();
            wdlg.SetAnimationStatus(false);
            wdlg.SetStatusPercent(0);
            wdlg.SetStatusText("Working");
            wdlg.CanCancel(true);

            IStep curStep = parent.GetCurrentStep();

            if (curStep == null)
            {
                return;
            }

            Dictionary <string, double> smallestDiameterList = new Dictionary <string, double>();
            StringBuilder sbResult = new StringBuilder();

            wdlg.ShowWorkingDlgAsThread();

            List <string> netNames  = curStep.GetAllNetNames();
            double        value     = 0;
            double        valueStep = 100.0 / netNames.Count;


            foreach (string netName in netNames)
            {
                INet net = curStep.GetNet(netName);

                wdlg.SetStatusText("Working on " + netName + "...");
                value += valueStep;
                wdlg.SetStatusPercent((int)(value));

                List <IODBObject> allNetElements = net.GetAllNetObjects(parent);
                if (allNetElements.Count == 0)
                {
                    continue;
                }

                double smallestDiameter = allNetElements[0].GetDiameter();
                foreach (IODBObject netElement in allNetElements)
                {
                    double currentDiameter = netElement.GetDiameter();
                    if (currentDiameter < 0)
                    {
                        continue;                      //e.g. surfaces have no diameter
                    }
                    if (currentDiameter < smallestDiameter)
                    {
                        smallestDiameter = currentDiameter;
                    }
                }

                smallestDiameterList.Add(netName, smallestDiameter);
                sbResult.AppendLine(netName + ": " + smallestDiameter.ToString() + " mils");
            }
            wdlg.Dispose();
            PCB_Investigator.Localization.PCBILocalization.ShowMsgBox("All smallest Net Diameters:" + Environment.NewLine + sbResult.ToString(), "Result", MessageBoxButtons.OK, MessageBoxIcon.Information);
        }
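Example #10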
 public YoutubeService(INet net)
 {
     if (net == null)
     {
         throw new ArgumentNullException("net");
     }
     this.net = net;
 }
Example #11
        public void Load()
        {
            this.net = NetExtension.LoadNet($"{this.Prefix}net.dat");

            FluentNet fluentNet = net as FluentNet; // assumes the loaded net is a FluentNet

            this.inputWidth = fluentNet.InputLayers[0].InputWidth;
        }
Example #12
 public static void SaveNetState(string filename, INet net)
 {
     using (var fs = new FileStream(filename, FileMode.Create, FileAccess.Write, FileShare.ReadWrite))
     {
         var bf = new BinaryFormatter();
         bf.Serialize(fs, net);
     }
 }
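A plausible load counterpart, shown only as a sketch: Example #28 calls BinaryFileSerializer.LoadNetState, whose real implementation may differ; this version assumes the concrete net type is [Serializable] so BinaryFormatter can round-trip it.

 public static INet LoadNetState(string filename)
 {
     using (var fs = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
     {
         var bf = new BinaryFormatter();
         return (INet)bf.Deserialize(fs);   // assumes the stream was written by SaveNetState above
     }
 }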
Example #13
        public IConnection Create(INet net, Socket socket, ISession session)
        {
            ++next_id;
            Connection conn = new Connection(next_id, net, socket, session);

            dict[next_id] = conn;
            return(conn);
        }
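Example #14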
 protected override void PrepareToLearning(INet net)
 {
     _b = Matrix.CreateI(net.WeightsCount, net.WeightsCount, () => new RealNumber(1), () => new RealNumber(0)); //b0
     _weights = net.GetWeights(); //x0
     _inputs = net.GetLastInputsForWeights();
     _gradient = null;
     _prevGradient = null;
     _prevStep = null;
 }
Example #15
        protected override void LearnPattern(INet net, ILearningPattern learningPattern, double currentPatternError)
        {
            //call only after net.propagation()
            PropagateErrorOnLayers(net.Layers, learningPattern.Output);
            CalculateWeightDelta(net);

            //ChangeAndSetWeights(_deltas, net);
            //_deltas = null;
        }
Example #16
 public void Accept(INet sourceNet)
 {
     _network = Network.Load(sourceNet.Persistence()) as ActivationNetwork;
     _teacher = new BackPropagationLearning(_network)
     {
         LearningRate = _learningRate,
         Momentum     = 0.9
     };
 }
Example #17
        public TrainingScheme(INet net, TrainerBase trainer, EntryContainer container, string label)
        {
            this.container = container;
            this.label     = label;
            this.net       = net;
            this.trainer   = trainer;

            int n = container.ClassCount;
        }
Example #18
 public bool Create()
 {
     m_pListenNet = libNet.Network.MyNetwork.Listen("0.0.0.0:1111", this);
     if (m_pListenNet == null)
     {
         return false;
     }
     ThreadPool.QueueUserWorkItem(ClientProcesFunc, this);
     return true;
 }
Example #19
 public AdamTrainer(INet <T> net) : base(net)
 {
     if (typeof(T) == typeof(double))
     {
         this.Eps = (T)(ValueType)1e-8;
     }
     else if (typeof(T) == typeof(float))
     {
         this.Eps = (T)(ValueType)(float)1e-8;
     }
 }
Example #20
        private void MnistDemo()
        {
            Directory.CreateDirectory(mnistFolder);

            string trainingLabelFilePath = Path.Combine(mnistFolder, trainingLabelFile);
            string trainingImageFilePath = Path.Combine(mnistFolder, trainingImageFile);
            string testingLabelFilePath  = Path.Combine(mnistFolder, testingLabelFile);
            string testingImageFilePath  = Path.Combine(mnistFolder, testingImageFile);

            // Download Mnist files if needed
            Console.WriteLine("Downloading Mnist training files...");
            DownloadFile(urlMnist + trainingLabelFile, trainingLabelFilePath);
            DownloadFile(urlMnist + trainingImageFile, trainingImageFilePath);
            Console.WriteLine("Downloading Mnist testing files...");
            DownloadFile(urlMnist + testingLabelFile, testingLabelFilePath);
            DownloadFile(urlMnist + testingImageFile, testingImageFilePath);

            // Load data
            Console.WriteLine("Loading the datasets...");
            this.training = MnistReader.Load(trainingLabelFilePath, trainingImageFilePath);
            this.testing  = MnistReader.Load(testingLabelFilePath, testingImageFilePath);

            if (this.training.Count == 0 || this.testing.Count == 0)
            {
                Console.WriteLine("Missing Mnist training/testing files.");
                Console.ReadKey();
                return;
            }

            // Create network
            this.net = FluentNet.Create(24, 24, 1)
                       .Conv(5, 5, 8).Stride(1).Pad(2)
                       .Relu()
                       .Pool(2, 2).Stride(2)
                       .Conv(5, 5, 16).Stride(1).Pad(2)
                       .Relu()
                       .Pool(3, 3).Stride(3)
                       .FullyConn(10)
                       .Softmax(10)
                       .Build();

            this.trainer = new AdadeltaTrainer(this.net)
            {
                BatchSize = 20,
                L2Decay   = 0.001,
            };

            Console.WriteLine("Convolutional neural network learning...[Press any key to stop]");
            do
            {
                var sample = this.SampleTrainingInstance();
                this.Step(sample);
            } while (!Console.KeyAvailable);
        }
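For orientation (standard convolution arithmetic, assuming the usual Stride/Pad semantics; not stated in the source): the 24x24x1 input stays 24x24x8 after the padded 5x5 convolution, shrinks to 12x12x8 after the 2x2 stride-2 pooling, stays 12x12x16 after the second padded convolution, and ends at 4x4x16 = 256 activations after the 3x3 stride-3 pooling, which feed the 10-way fully connected and softmax layers.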
Example #21
        /// <exception cref="Exception">When called concurrently with <see cref="TickAsync"/>.</exception>
        public INet CreatePlace(out Place place, string description = null)
        {
            if (_tickTask != null && !_tickTask.IsCompleted)
            {
                throw new Exception($"{nameof(CreatePlace)} called during tick.");
            }

            INet net = CreatePlace <Place>(out Place instance, description);

            place = (Place)instance;
            return(net);
        }
Example #22
    public static INet LoadNet(string filename)
    {
        INet result = null;

        if (File.Exists(filename))
        {
            using (var fs = new FileStream(filename, FileMode.Open))
            {
                result = SerializationExtensions.LoadBinary(fs);
            }
        }

        return(result);
    }
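Example #23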
    private void sendNetPacket(MsgType t, INet outmsg)
    {
        NetPacket    msg    = new NetPacket();
        MemoryStream stream = new MemoryStream();
        BinaryWriter buffer = new BinaryWriter(stream);

        outmsg.Serialize(buffer);

        if (buffer.BaseStream.Length + NetPacket.DEFAULT_FRAME_LEN > 512)
        {
            msg.message_type = (byte)MsgType.Multipart;
            //  calculate how many parts we have to split this into
            int maxsize = 512 - (12 + NetPacket.DEFAULT_FRAME_LEN);
            int parts   = ((int)buffer.BaseStream.Length / maxsize) + 1;
            this.multi_groupid++;
            int bstart = 0;
            for (int i = 0; i < parts; i++)
            {
                int bend = bstart + maxsize;
                if (i + 1 == parts)
                {
                    bend = bstart + (((int)buffer.BaseStream.Length) % maxsize);
                }
                Multipart wrapper = new Multipart();
                wrapper.ID       = (ushort)i;
                wrapper.GroupID  = this.multi_groupid;
                wrapper.NumParts = (ushort)parts;
                wrapper.Content  = new byte[bend - bstart];
                buffer.BaseStream.Position = bstart;                        // rewind to this chunk's start before copying it out
                buffer.BaseStream.Read(wrapper.Content, 0, bend - bstart);

                MemoryStream pstream = new MemoryStream();
                BinaryWriter pbuffer = new BinaryWriter(pstream);
                wrapper.Serialize(pbuffer);

                msg.content        = pstream.ToArray();
                msg.content_length = (ushort)pstream.Length;
                this.sending_socket.Send(msg.MessageBytes());
                bstart = bend;
            }
        }
        else
        {
            msg.content        = stream.ToArray();
            msg.content_length = (ushort)msg.content.Length;
            msg.message_type   = (byte)t;
            this.sending_socket.Send(msg.MessageBytes());
        }
    }
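Note on the splitting above: the code budgets 512 bytes per datagram and reserves NetPacket.DEFAULT_FRAME_LEN plus 12 bytes for the Multipart framing, so each chunk carries at most 512 - (12 + NetPacket.DEFAULT_FRAME_LEN) payload bytes and the final chunk takes the remainder.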
Example #24
        private void MnistDemo()
        {
            var datasets = new DataSets();

            if (!datasets.Load(100))
            {
                return;
            }

            // Create network
            this._net = FluentNet <double> .Create(24, 24, 1)
                        .Conv(5, 5, 8).Stride(1).Pad(2)
                        .Relu()
                        .Pool(2, 2).Stride(2)
                        .Conv(5, 5, 16).Stride(1).Pad(2)
                        .Relu()
                        .Pool(3, 3).Stride(3)
                        .FullyConn(10)
                        .Softmax(10)
                        .Build();

            this._trainer = new SgdTrainer <double>(this._net)
            {
                LearningRate = 0.01,
                BatchSize    = 20,
                L2Decay      = 0.001,
                Momentum     = 0.9
            };

            Console.WriteLine("Convolutional neural network learning...[Press any key to stop]");
            do
            {
                var trainSample = datasets.Train.NextBatch(this._trainer.BatchSize);
                Train(trainSample.Item1, trainSample.Item2, trainSample.Item3);

                var testSample = datasets.Test.NextBatch(this._trainer.BatchSize);
                Test(testSample.Item1, testSample.Item3, this._testAccWindow);

                Console.WriteLine("Loss: {0} Train accuracy: {1}% Test accuracy: {2}%", this._trainer.Loss,
                                  Math.Round(this._trainAccWindow.Items.Average() * 100.0, 2),
                                  Math.Round(this._testAccWindow.Items.Average() * 100.0, 2));

                Console.WriteLine("Example seen: {0} Fwd: {1}ms Bckw: {2}ms", this._stepCount,
                                  Math.Round(this._trainer.ForwardTimeMs, 2),
                                  Math.Round(this._trainer.BackwardTimeMs, 2));
            } while (!Console.KeyAvailable);
        }
Example #25
        public Application(IApplicationListener listener, ApplicationConfiguration config)
        {
            initializeGlfw();
            setApplicationLogger(new ApplicationLogger());
            this.config = ApplicationConfiguration.copy(config);
            if (this.config.title == null)
            {
                this.config.title = listener.GetType().Name;
            }
            Gdx.app = this;
            if (!config.disableAudio)
            {
                try
                {
                    //this.audio = Gdx.audio = new OpenALAudio(config.audioDeviceSimultaneousSources, config.audioDeviceBufferCount, config.audioDeviceBufferSize);
                }
                catch (Exception t)
                {
                    log("Lwjgl3Application", "Couldn't initialize audio, disabling audio", t);
                    //this.audio = Gdx.audio = new MockAudio();
                }
            }
            else
            {
                //this.audio = Gdx.audio = new MockAudio();
            }

            files     = Gdx.files = new Files();
            net       = Gdx.net = new Net();
            clipboard = new Clipboard();

            var window = createWindow(config, listener, null);

            windows.Add(window);

            loop();
            cleanupWindows();
            cleanup();
        }
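Example #26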
        public static void SaveBinary(this INet net, Stream stream)
        {
            IFormatter formatter = new BinaryFormatter();

            formatter.Serialize(stream, net);
        }
Example #27
        public double Train(INet Net, int Epochs = 1000)
        {
            Dictionary<INode, double> deltas = new Dictionary<INode,double>();
            foreach(var nodeLayer in Net.NodeLayers)
            {
                foreach(var node in nodeLayer.Nodes)
                {
                    deltas.Add(node, 0.0);
                }
            }

            for (int i = 0; i < Epochs; i++)
            {
                foreach (var dataSet in DataSets)
                {
                    double error = 0;
                    var results = Net.Calculate(dataSet.Inputs, dataSet.Outputs, ref error);

                    // set delta of output nodes
                    for(int r = 0; r < results.Length; r++)
                    {
                        double delta = (dataSet.Outputs[r] - results[r]) * results[r] * (1 - results[r]);
                        deltas[Net.NodeLayers.Last().Nodes[r]] = delta;
                    }

                    for(int l = Net.NodeLayers.Length - 2; l >= 0; l--)
                    {
                        for(int l2 = 0; l2 < Net.NodeLayers[l].Nodes.Length; l2++)
                        {
                            var node = Net.NodeLayers[l].Nodes[l2];
                            double delta = 0;
                            foreach(var linkedNode in Net.NodeLayers[l+1].Nodes)
                            {
                                // add delta * weight of that node
                                delta += node.Result * (1 - node.Result) * linkedNode.Weights[l2] * deltas[linkedNode];

                            }
                            // save delta for other nodes
                            deltas[node] = delta;

                        }
                    }
                    foreach (var delta in deltas)
                    {
                        AdjustNode(delta.Key, dataSet.Inputs, delta.Value);
                    }
                }
            }

            double SSE = 0;
            foreach (var dataSet in DataSets)
            {
                double error = 0;
                var result = Net.Calculate(dataSet.Inputs, dataSet.Outputs, ref error);
                SSE += error;
            }
            return SSE;
        }
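The updates above follow the classic delta rule for sigmoid units: the output delta (target - output) * output * (1 - output) is the error scaled by the sigmoid derivative, and each hidden node's delta is its own derivative times the weighted sum of the deltas in the next layer. AdjustNode (not shown in this listing) presumably applies each delta to the node's incoming weights.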
Example #28
 public PropagationViewModel(Action<ViewModelBase> nextViewModel, string filename)
     : base(nextViewModel)
 {
     _net = BinaryFileSerializer.LoadNetState(filename);
 }
Example #29
 private static void SetPropagatedErrorToZero(INet net)
 {
     foreach (var layer in net.Layers)
     {
         layer.ForeachNeuron((i, neuron) =>
         {
             neuron.PropagatedError = new RealNumber(0.0);
         });
     }
 }
Example #30
        public static void Run()
        {
            const string NetName = "net.dat";

            var random         = new Random();
            var entryContainer = new EntryContainer();

            #region Load Net

            var convInputWith = 11; // Will extract 11x11 area
            if (convInputWith % 2 == 0)
            {
                throw new ArgumentException("convInputWith must be odd");
            }

            // Load the AI or initialize a new network if not found - direction choice
            INet singleNet = NetExtension.LoadOrCreateNet(NetName, () =>
            {
                var net = FluentNet.Create(convInputWith, convInputWith, 3)
                          .Conv(3, 3, 16).Stride(2)
                          .Tanh()
                          .Conv(2, 2, 16)
                          .Tanh()
                          .FullyConn(100)
                          .Relu()
                          .FullyConn(5)
                          .Softmax(5).Build();

                return(net);
            });

            #endregion

            #region Load data

            var hltFiles = Directory.EnumerateFiles(@"..\..\..\games\2609\", "*.hlt").ToList(); // erdman games downloaded with HltDownloader
            int total    = hltFiles.Count;
            Console.WriteLine($"Loading {total} games...");

            foreach (var file in hltFiles)
            {
                Console.WriteLine(total--);
                HltReader reader = new HltReader(file);

                var playerId     = -1;
                var playerToCopy = reader.PlayerNames.FirstOrDefault(o => o.StartsWith("erdman"));

                if (playerToCopy == null)
                {
                    Console.WriteLine("Player not found");
                    continue;
                }

                playerId = reader.PlayerNames.IndexOf(playerToCopy) + 1;

                var width  = reader.Width;
                var height = reader.Height;

                int lastmoveCount = 1;

                for (var frame = 0; frame < reader.FrameCount - 1; frame++)
                {
                    var currentFrame = reader.GetFrame(frame);
                    var map          = currentFrame.map;
                    var moves        = currentFrame.moves;

                    var helper = new Helper(map, (ushort)playerId);

                    bool foundInFrame = false;
                    int  moveCount    = 0;

                    // moves
                    for (ushort x = 0; x < width; x++)
                    {
                        for (ushort y = 0; y < height; y++)
                        {
                            if (map[x, y].Owner == playerId)
                            {
                                foundInFrame = true;
                                moveCount++;

                                if (random.NextDouble() < 1.0 / lastmoveCount)
                                {
                                    var convVolume = map.GetVolume(convInputWith, playerId, x, y); // Input
                                    var direction  = moves[y][x];                                  // Output

                                    var entry1 = new Entry(new[] { convVolume }, direction, x, y, frame, file.GetHashCode());
                                    entryContainer.Add(entry1);

                                    // Data augmentation
                                    var entry2 = new Entry(new[] { convVolume.Flip(VolumeUtilities.FlipMode.LeftRight) }, (int)Helper.FlipLeftRight((Direction)direction), x, y, frame, file.GetHashCode());
                                    entryContainer.Add(entry2);
                                    var entry3 = new Entry(new[] { convVolume.Flip(VolumeUtilities.FlipMode.UpDown) }, (int)Helper.FlipUpDown((Direction)direction), x, y, frame, file.GetHashCode());
                                    entryContainer.Add(entry3);
                                    var entry4 = new Entry(new[] { convVolume.Flip(VolumeUtilities.FlipMode.Both) }, (int)Helper.FlipBothWay((Direction)direction), x, y, frame, file.GetHashCode());
                                    entryContainer.Add(entry4);
                                }
                            }
                        }
                    }

                    lastmoveCount = moveCount;

                    if (!foundInFrame)
                    {
                        // player has died
                        break;
                    }
                }
            }

            var length = entryContainer.Shuffle();
            Console.WriteLine(entryContainer.Summary);

            #endregion

            #region Training

            var trainer = new AdamTrainer(singleNet)
            {
                BatchSize = 1024, LearningRate = 0.1, Beta1 = 0.9, Beta2 = 0.99, Eps = 1e-8
            };
            var    trainingScheme    = new TrainingScheme(singleNet, trainer, entryContainer, "single");
            bool   save              = true;
            double lastValidationAcc = 0.0;

            do
            {
                for (int i = 0; i < 1000; i++)
                {
                    if (i > 5)
                    {
                        trainer.L2Decay = 0.001;
                    }

                    Console.WriteLine($"Epoch #{i + 1}");

                    if (i % 15 == 0)
                    {
                        trainer.LearningRate = Math.Max(trainer.LearningRate / 10.0, 0.00001);
                    }

                    trainingScheme.RunEpoch();

                    #region Save Nets

                    if (save)
                    {
                        // Save if validation accuracy has improved
                        if (trainingScheme.ValidationAccuracy > lastValidationAcc)
                        {
                            lastValidationAcc = trainingScheme.ValidationAccuracy;
                            singleNet.SaveNet(NetName);
                        }
                    }
                    #endregion

                    if (Console.KeyAvailable)
                    {
                        break;
                    }
                }
            } while (!Console.KeyAvailable);

            #endregion
        }
Example #31
 public SgdTrainer(INet <double> net) : base(net)
 {
 }
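Example #32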
        private IVector MakeStep(IVector direction, INet net, double currentError)
        {
            const int maximumNumberOfTry = 50;
            var numberOfTry = 0;
            double error;
            var step = direction.Mul(_alpha);

            var oldWeights = _weights;
            //can change alpha by minimizing it in f(xk + alpha*direction)
            do
            {
                if (numberOfTry > maximumNumberOfTry || _alpha < 0.000000000001)
                    break;

                _weights = oldWeights.Sum(step); //x(k+1) = xk + sk
                net.SetWeights(_weights); //content of _weights now shared between net and _weights vector
                error = GetBatchError(net);

                _alpha /= 2.1;
                step = direction.Mul(_alpha);
                numberOfTry++;
            } while (error > currentError);

            if (numberOfTry > maximumNumberOfTry || _alpha < 0.000000000001)
            {
                Console.WriteLine("Simple step was performed. Too little alpha: {0:0.#############}.", _alpha);
                //step = direction.Mul(_alpha);
                _weights = oldWeights.Sum(direction.Mul(0.1));
                net.SetWeights(_weights);
                //AddLittleCorrectionToWeights(net.Layers);
                //_gradient = null;
                _alpha = 1.0;
                return null;
            }

            _alpha = 1.0;
            return step;
        }
Example #33
 private void ChangeAndSetWeights(IVector delta, INet net)
 {
     _weights = _weights.Sum(delta);
     net.SetWeights(_weights);
 }
Example #34
 protected virtual double CalculatePatternError(INet net, ILearningPattern pattern)
 {
     return GetPatternError(net, pattern);
 }
Example #35
 protected double GetBatchError(INet net)
 {
     return _patterns.Sum(learningPattern => CalculatePatternError(net, learningPattern));
 }
Example #36
 protected abstract bool LearnBatch(INet net, double currentLearningCycleError);
Example #37
 public void LearnNet(INet net)
 {
     _worker.RunWorkerAsync(net);
 }
Example #38
 protected abstract void PrepareToLearning(INet net);
Example #39
 internal Place(INet net, string description)
 {
     _net         = net;
     _description = description;
 }
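Example #40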
        private IVector MakeStepGoldstein(IVector direction, INet net, double currentError, IVector gradient)
        {
            const int maximumNumberOfTry = 50;
            var numberOfTry = 0;
            double error;
            var step = direction.Mul(_alpha);
            var c = 0.1;
            var p = _random.NextDouble()*0.3 + 0.3;

            var oldWeights = _weights;
            //can change alpha by minimizing it in f(xk + alpha*direction)
            var threshold = 0.0;
            do
            {
                if (numberOfTry > maximumNumberOfTry || _alpha < 0.00001)
                    break;

                _weights = oldWeights.Sum(step); //x(k+1) = xk + sk
                net.SetWeights(_weights); //content of _weights now shared between net and _weights vector
                error = GetBatchError(net);

                threshold = gradient.Mul(step).Mul(_alpha).Mul(c).GetMod().X;

                _alpha *= p;
                step = direction.Mul(_alpha);
                numberOfTry++;
            } while (error > currentError + threshold);

            if (numberOfTry > maximumNumberOfTry || _alpha < 0.00001)
            {
                _weights = oldWeights;
                net.SetWeights(_weights);
                //will make gradient descent on next step
                _gradient = null;
                _b = Matrix.CreateI(net.WeightsCount, net.WeightsCount, () => new RealNumber(1), () => new RealNumber(0)); //b0
                _alpha = 1.0;
                Console.WriteLine("Reset matrix");
                return null;
            }

            _alpha = 1.0;
            return step;
        }
Example #41
 protected override bool LearnBatch(INet net, double currentLearningCycleError)
 {
     ChangeAndSetWeights(_deltas, net);
     _deltas = null;
     return true;
 }
Example #42
 private void CalculateWeightDelta(INet net)
 {
     var gradient = CreateWeightsGradient(net.Layers);
     var currentDelta = gradient.Mul(_alpha);
     _deltas = _deltas == null ? currentDelta : _deltas.Sum(currentDelta);
 }
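Example #43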
 //here we summarize gradient of each pattern
 protected override void LearnPattern(INet net, ILearningPattern learningPattern, double currentPatternError)
 {
     PropagateErrorOnLayers(net.Layers, learningPattern.Output); //nablaF(xk)
     var currentGradient = CreateWeightsGradient(net.Layers);
     _gradient = _gradient == null ? currentGradient : _gradient.Sum(currentGradient);
 }
Example #44
 protected override void PrepareToLearning(INet net)
 {
     _weights = net.GetWeights();
     _outputs = net.GetLastInputsForWeights();
 }
Example #45
 public override void OnRecvice(INet net, byte[] data, short len)
 {
 }
Example #46
 public NeuroEvolveBot(INet net, int seed = 3, string name = "NeuroEvolve")
 {
     rand    = new Random(seed);
     network = net;
     Name    = name;
 }
Example #47
        protected int K; // iteration counter

        protected TrainerBase(INet net)
        {
            this.Net = net;

            this.BatchSize = 1;
        }
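Example #48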
 public SgdTrainer(INet <T> net, T learningRate) : base(net)
 {
     this.LearningRate = learningRate;
     this.Optimizer    = new GradientDescentOptimizer <T>(learningRate);
 }
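Example #49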
 protected TrainerBase(INet <T> net)
 {
     this.Net = net;
 }
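Example #50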
 public AdamTrainer(INet <float> net) : base(net)
 {
 }
Example #51
 public SgdTrainer(INet <T> net) : base(net)
 {
 }
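Example #52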
        public static void Run()
        {
            var random = new Random(RandomUtilities.Seed);

            int normalInputWidth = 19;
            int earlyInputWidth  = 19;
            int strongInputWidth = 19;

            string NetName        = "net.dat";
            string NetName_early  = "net_early.dat";
            string NetName_strong = "net_strong.dat";

            var entryContainer        = new EntryContainer();
            var entryContainer_early  = new EntryContainer();
            var entryContainer_strong = new EntryContainer();

            #region Load Net

            INet singleNet = NetExtension.LoadOrCreateNet(NetName, () =>
            {
                var net = FluentNet.Create(normalInputWidth, normalInputWidth, 3)
                          .Conv(5, 5, 16).Stride(5).Pad(2)
                          .Tanh()
                          .Conv(3, 3, 16).Stride(1).Pad(1)
                          .Tanh()
                          .FullyConn(100)
                          .Relu()
                          .FullyConn(5)
                          .Softmax(5).Build();

                return(net);
            });

            INet singleNet_early = NetExtension.LoadOrCreateNet(NetName_early, () =>
            {
                var net = FluentNet.Create(earlyInputWidth, earlyInputWidth, 3)
                          .Conv(5, 5, 16).Stride(5).Pad(2)
                          .Tanh()
                          .Conv(3, 3, 16).Stride(1).Pad(1)
                          .Tanh()
                          .FullyConn(100)
                          .Relu()
                          .FullyConn(5)
                          .Softmax(5).Build();

                return(net);
            });

            INet singleNet_strong = NetExtension.LoadOrCreateNet(NetName_strong, () =>
            {
                var net = FluentNet.Create(strongInputWidth, strongInputWidth, 3)
                          .Conv(5, 5, 16).Stride(5).Pad(2)
                          .Tanh()
                          .Conv(3, 3, 16).Stride(1).Pad(1)
                          .Tanh()
                          .FullyConn(100)
                          .Relu()
                          .FullyConn(5)
                          .Softmax(5).Build();

                return(net);
            });

            #endregion

            #region Load data

            var hltFiles = Directory.EnumerateFiles(@"..\..\..\games\2609\", "*.hlt").ToList(); // erdman games downloaded with HltDownloader
            int total    = hltFiles.Count;
            Console.WriteLine($"Loading {total} games...");

            foreach (var file in hltFiles)
            {
                Console.WriteLine(total--);
                HltReader reader = new HltReader(file);

                var playerId     = -1;
                var playerToCopy = reader.PlayerNames.FirstOrDefault(o => o.StartsWith("erdman"));

                if (playerToCopy != null)
                {
                    playerId = reader.PlayerNames.IndexOf(playerToCopy) + 1;
                }

                if (playerId != -1)
                {
                    var width  = reader.Width;
                    var height = reader.Height;

                    int lastmoveCount = 1;

                    for (var frame = 0; frame < reader.FrameCount - 1; frame++)
                    {
                        bool earlyGame = lastmoveCount < 25;

                        var currentFrame = reader.GetFrame(frame);
                        var map          = currentFrame.map;
                        var moves        = currentFrame.moves;

                        var helper = new Helper(map, (ushort)playerId);

                        bool foundInFrame = false;
                        int  moveCount    = 0;

                        // moves
                        for (ushort x = 0; x < width; x++)
                        {
                            for (ushort y = 0; y < height; y++)
                            {
                                if (map[x, y].Owner == playerId)
                                {
                                    bool strong = map[x, y].Strength > 200;
                                    foundInFrame = true;
                                    moveCount++;

                                    if ((earlyGame && random.NextDouble() < 1.5 / lastmoveCount) || (strong && random.NextDouble() < 1.5 / lastmoveCount) || random.NextDouble() < 1.0 / lastmoveCount)
                                    {
                                        var w         = normalInputWidth;
                                        var container = entryContainer;

                                        if (earlyGame)
                                        {
                                            w         = earlyInputWidth;
                                            container = entryContainer_early;
                                        }
                                        else if (strong)
                                        {
                                            w         = strongInputWidth;
                                            container = entryContainer_strong;
                                        }

                                        var convVolume = map.GetVolume(w, playerId, x, y);

                                        var direction = moves[y][x];

                                        var entry1 = new Entry(new[] { convVolume }, direction, x, y, frame, file.GetHashCode());
                                        container.Add(entry1);
                                        var entry2 = new Entry(new[] { convVolume.Flip(VolumeUtilities.FlipMode.LeftRight) }, (int)Helper.FlipLeftRight((Direction)direction), x, y, frame, file.GetHashCode());
                                        container.Add(entry2);
                                        var entry3 = new Entry(new[] { convVolume.Flip(VolumeUtilities.FlipMode.UpDown) }, (int)Helper.FlipUpDown((Direction)direction), x, y, frame, file.GetHashCode());
                                        container.Add(entry3);
                                        var entry4 = new Entry(new[] { convVolume.Flip(VolumeUtilities.FlipMode.Both) }, (int)Helper.FlipBothWay((Direction)direction), x, y, frame, file.GetHashCode());
                                        container.Add(entry4);
                                    }
                                }
                            }
                        }

                        lastmoveCount = moveCount;

                        if (!foundInFrame)
                        {
                            // player has died
                            break;
                        }
                    }
                }
                else
                {
                    Console.WriteLine("not found");
                }
            }

            var length = entryContainer.Shuffle();
            Console.WriteLine("normal: " + entryContainer.Summary);
            length = entryContainer_early.Shuffle();
            Console.WriteLine("early: " + entryContainer_early.Summary);
            length = entryContainer_strong.Shuffle();
            Console.WriteLine("strong " + entryContainer_strong.Summary);

            #endregion

            #region Training

            var trainer = new AdamTrainer(singleNet)
            {
                BatchSize = 1024, LearningRate = 0.01, Beta1 = 0.9, Beta2 = 0.99, Eps = 1e-8
            };
            var trainingScheme = new TrainingScheme(singleNet, trainer, entryContainer, "single");

            var trainer_early = new AdamTrainer(singleNet_early)
            {
                BatchSize = 1024, LearningRate = 0.01, Beta1 = 0.9, Beta2 = 0.99, Eps = 1e-8
            };
            var trainingScheme_early = new TrainingScheme(singleNet_early, trainer_early, entryContainer_early, "single_early");

            var trainer_strong = new AdamTrainer(singleNet_strong)
            {
                BatchSize = 1024, LearningRate = 0.01, Beta1 = 0.9, Beta2 = 0.99, Eps = 1e-8
            };
            var trainingScheme_strong = new TrainingScheme(singleNet_strong, trainer_strong, entryContainer_strong, "single_strong");

            bool   save = true;
            double lastValidationAcc        = 0.0;
            double lastValidationAcc_early  = 0.0;
            double lastValidationAcc_strong = 0.0;
            double lastTrainAcc             = 0.0;
            double lastTrainAcc_early       = 0.0;
            double lastTrainAcc_strong      = 0.0;
            do
            {
                var normal = Task.Factory.StartNew(() =>
                {
                    for (int i = 0; i < 50; i++)
                    {
                        if (i > 5)
                        {
                            trainer.L2Decay = 0.05;
                        }

                        Console.WriteLine($"[normal] Epoch #{i + 1}");

                        if (i % 50 == 0)
                        {
                            trainer.LearningRate = Math.Max(trainer.LearningRate / 5.0, 0.00001);
                        }

                        trainingScheme.RunEpoch();

                        #region Save Nets

                        if (save)
                        {
                            if (trainingScheme.ValidationAccuracy > lastValidationAcc)
                            {
                                lastValidationAcc = trainingScheme.ValidationAccuracy;
                                lastTrainAcc      = trainingScheme.TrainAccuracy;
                                singleNet.SaveNet(NetName);
                            }
                        }
                        #endregion

                        if (Console.KeyAvailable)
                        {
                            break;
                        }
                    }
                });

                var early = Task.Factory.StartNew(() =>
                {
                    for (int i = 0; i < 50; i++)
                    {
                        if (i > 5)
                        {
                            trainer_early.L2Decay = 0.05;
                        }

                        Console.WriteLine($"[early] Epoch #{i + 1}");

                        if (i % 50 == 0)
                        {
                            trainer_early.LearningRate = Math.Max(trainer_early.LearningRate / 5.0, 0.00001);
                        }

                        trainingScheme_early.RunEpoch();

                        #region Save Nets

                        if (save)
                        {
                            if (trainingScheme_early.ValidationAccuracy > lastValidationAcc_early)
                            {
                                lastValidationAcc_early = trainingScheme_early.ValidationAccuracy;
                                lastTrainAcc_early      = trainingScheme_early.TrainAccuracy;
                                singleNet_early.SaveNet(NetName_early);
                            }
                        }
                        #endregion

                        if (Console.KeyAvailable)
                        {
                            break;
                        }
                    }
                });

                var strong = Task.Factory.StartNew(() =>
                {
                    for (int i = 0; i < 50; i++)
                    {
                        if (i > 5)
                        {
                            trainer_strong.L2Decay = 0.05;
                        }

                        Console.WriteLine($"[strong] Epoch #{i + 1}");

                        if (i % 50 == 0)
                        {
                            trainer_strong.LearningRate = Math.Max(trainer_strong.LearningRate / 5.0, 0.00001);
                        }

                        trainingScheme_strong.RunEpoch();

                        #region Save Nets

                        if (save)
                        {
                            if (trainingScheme_strong.ValidationAccuracy > lastValidationAcc_strong)
                            {
                                lastValidationAcc_strong = trainingScheme_strong.ValidationAccuracy;
                                lastTrainAcc_strong      = trainingScheme_strong.TrainAccuracy;
                                singleNet_strong.SaveNet(NetName_strong);
                            }
                        }
                        #endregion

                        if (Console.KeyAvailable)
                        {
                            break;
                        }
                    }
                });

                Task.WaitAll(new[] { normal, strong, early });
            } while (!Console.KeyAvailable);

            #endregion
        }
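Example #53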
 public SgdTrainer(INet <float> net) : base(net)
 {
 }
Example #54
 protected abstract void LearnPattern(INet net, ILearningPattern learningPattern, double currentPatternError);