Example #1
        private static BPTTTeacher GetTeacher(int weightsCount, NeuralTuringMachine machine)
        {
            // RMSProp weight updater sized to the machine's full weight vector
            RMSPropWeightUpdater rmsPropWeightUpdater = new RMSPropWeightUpdater(weightsCount, 0.95, 0.5, 0.001, 0.001);

            BPTTTeacher teacher = new BPTTTeacher(machine, rmsPropWeightUpdater);

            return teacher;
        }
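A minimal usage sketch (not from the source): it wires a freshly initialized machine to a teacher and runs one training step. GetRandomMachine's signature is assumed from Example #2, and the sequence generator call mirrors Example #6.

int weightsCount;
NeuralTuringMachine machine = GetRandomMachine(out weightsCount, 100, 1, 128, 20, new Random(42)); // hypothetical call; signature as in Example #2
BPTTTeacher teacher = GetTeacher(weightsCount, machine);

var sequence = SequenceGenerator.GenerateSequence(8, 8);                   // (sequence length, vector size), as in Example #6
double[][] machinesOutput = teacher.Train(sequence.Item1, sequence.Item2); // one BPTT training step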
Example #2
        private static void ClassicIterations(int controllerSize, int headCount, int memoryN, int memoryM, Random rand, List <double[][]> inputs, List <double[][]> outputs)
        {
            int weightsCount;
            var machine = GetRandomMachine(out weightsCount, controllerSize, headCount, memoryN, memoryM, rand);
            RMSPropWeightUpdater rmsPropWeightUpdater = new RMSPropWeightUpdater(weightsCount, 0.95, 0.5, 0.001);
            BPTTTeacher          teacher = new BPTTTeacher(machine, rmsPropWeightUpdater);

            // rolling windows holding the last 100 per-iteration errors and training times
            double[][] errors = new double[100][];
            long[]     times  = new long[100];
            for (int i = 0; i < 100; i++)
            {
                errors[i] = new double[4];
                for (int j = 0; j < 4; j++)
                {
                    errors[i][j] = 1;
                }
            }

            int count = inputs.Count;

            for (int i = 1; i < 10000000; i++)
            {
                int        index     = rand.Next(count);
                double[][] input     = inputs[index];
                double[][] output    = outputs[index];
                Stopwatch  stopwatch = new Stopwatch();
                stopwatch.Start();
                double[][] machinesOutput = teacher.Train(input, output);
                stopwatch.Stop();
                times[i % 100] = stopwatch.ElapsedMilliseconds;

                double[] error = CalculateLoss(output, machinesOutput);

                errors[i % 100][0] = error[0];
                errors[i % 100][1] = error[1];
                errors[i % 100][2] = error[2];
                errors[i % 100][3] = error[3];

                double averageError = errors.Average(doubles => doubles.Average());

                if (i % 100 == 0)
                {
                    WriteExample(i, times, errors, averageError, output, machinesOutput);
                }
                if (i % 2500 == 0)
                {
                    string directoryName = string.Format("{0}_{1}_{2}_{3}", controllerSize, headCount, memoryM, memoryN);
                    if (!Directory.Exists(directoryName))
                    {
                        Directory.CreateDirectory(directoryName);
                    }
                    string filename = string.Format("NTM_{0}_{1}_{2}.ntm", i, DateTime.Now.ToString("s").Replace(":", ""),
                                                    averageError);
                    machine.Save(Path.Combine(directoryName, filename));
                }
            }
        }
Example #3
 public LearningTask(NeuralTuringMachine machine, RMSPropWeightUpdater weightUpdater, int id)
 {
     _iterations            = 0;
     _machine               = machine;
     _weightUpdater         = weightUpdater;
     _id                    = id;
     _teacher               = new BPTTTeacher(_machine, weightUpdater);
     _longTermAverageErrors = new List <double>();
     Priority               = 10;
 }
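A hedged construction sketch (not from the source), combining the updater settings from Example #1 with this constructor; GetRandomMachine's parameterless overload is assumed from Example #8.

int weightsCount;
NeuralTuringMachine machine = GetRandomMachine(out weightsCount);           // hypothetical call; overload as used in Example #8
RMSPropWeightUpdater updater = new RMSPropWeightUpdater(weightsCount, 0.95, 0.5, 0.001, 0.001);
LearningTask task = new LearningTask(machine, updater, 0);                  // id = 0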
Example #4
        public void CopyFrom(LearningTask task, int weightsCount)
        {
            // copy the source task's weights into this machine, then clone its updater state and history
            CopyMachine copyMachine = new CopyMachine(weightsCount, task._machine);

            _machine.UpdateWeights(copyMachine);
            _weightUpdater         = task._weightUpdater.Clone();
            _teacher               = new BPTTTeacher(_machine, _weightUpdater);
            _iterations            = task._iterations;
            _longTermAverageErrors = new List <double>(task._longTermAverageErrors);
            Priority               = task.Priority;
        }
Example #5
 public LearningTask(NeuralTuringMachine machine, RMSPropWeightUpdater weightUpdater, Func <Tuple <double[][], double[][]> > exampleGenerator, string directoryName, int id)
 {
     _iterations       = 0;
     _machine          = machine;
     _weightUpdater    = weightUpdater;
     _exampleGenerator = exampleGenerator;
     _directoryName    = directoryName;
     _id      = id;
     _teacher = new BPTTTeacher(_machine, weightUpdater);
     _longTermAverageErrors = new List <double>();
     Priority = 100 / 32; // integer division: evaluates to 3
 }
Example #6
        private static void Iterate(BPTTTeacher teacher, double[] errors, long[] times, int id)
        {
            const int vectorSize = 8;
            const int minSeqLen  = 1;
            const int maxSeqLen  = 20; // rand.Next's upper bound is exclusive, so generated lengths are 1..19
            //double savingThreshold = 0.0005;
            Random rand = new Random(DateTime.Now.Millisecond);

            for (int i = 1; i <= 100; i++)
            {
                var sequence = SequenceGenerator.GenerateSequence(rand.Next(minSeqLen, maxSeqLen), vectorSize);

                Stopwatch stopwatch = new Stopwatch();
                stopwatch.Start();
                double[][] machinesOutput = teacher.Train(sequence.Item1, sequence.Item2);
                stopwatch.Stop();
                times[i % 100] = stopwatch.ElapsedMilliseconds;

                double error = CalculateLoss(sequence.Item2, machinesOutput);

                errors[i % 100] = error;

                if (i % 100 == 0)
                {
                    double averageError2 = errors.Average();
                    Console.WriteLine("Iteration: {0}, error: {1}, id: {2}", i, averageError2, id);

                    //if (averageError2 < savingThreshold)
                    //{
                    //    savingThreshold /= 2;
                    //    machine.Save("NTM_" + averageError2 + "_" + DateTime.Now.ToString("s").Replace(":", ""));
                    //    maxSeqLen++;
                    //    minSeqLen++;
                    //}
                }
                //if (i % 100000 == 0)
                //{
                //    machine.Save("NTM_" + i + DateTime.Now.ToString("s").Replace(":", ""));
                //}
            }
        }
Example #7
        private static void StandardCopyTask(DataStream reportStream)
        {
            double[] errors = new double[100];
            long[]   times  = new long[100];
            for (int i = 0; i < 100; i++)
            {
                errors[i] = 1;
            }

            const int seed = 32702;

            Console.WriteLine(seed);
            //TODO: argument parsing
            Random rand = new Random(seed);

            const int vectorSize     = 8;
            const int controllerSize = 100;
            const int headsCount     = 1;
            const int memoryN        = 128;
            const int memoryM        = 20;
            const int inputSize      = vectorSize + 2;
            const int outputSize     = vectorSize;

            //TODO remove rand
            NeuralTuringMachine machine = new NeuralTuringMachine(inputSize, outputSize, controllerSize, headsCount, memoryN, memoryM, new RandomWeightInitializer(rand));

            //TODO extract weight count calculation
            int headUnitSize = Head.GetUnitSize(memoryM);

            var weightsCount =
                (headsCount * memoryN) +
                (memoryN * memoryM) +
                (controllerSize * headsCount * memoryM) +
                (controllerSize * inputSize) +
                (controllerSize) +
                (outputSize * (controllerSize + 1)) +
                (headsCount * headUnitSize * (controllerSize + 1));

            Console.WriteLine(weightsCount);

            RMSPropWeightUpdater rmsPropWeightUpdater = new RMSPropWeightUpdater(weightsCount, 0.95, 0.5, 0.001, 0.001);

            //NeuralTuringMachine machine = NeuralTuringMachine.Load(@"NTM_0.000583637804331003_2015-04-18T223455");

            BPTTTeacher teacher = new BPTTTeacher(machine, rmsPropWeightUpdater);


            //for (int i = 1; i < 256; i++)
            //{
            //    var sequence = SequenceGenerator.GenerateSequence(i, vectorSize);
            //    double[][] machineOutput = teacher.Train(sequence.Item1, sequence.Item2);
            //    double error = CalculateLoss(sequence.Item2, machineOutput);
            //    Console.WriteLine("{0},{1}", i, error);
            //}

            int    minSeqLen       = 200;
            int    maxSeqLen       = 200; // rand.Next(min, max) returns min when min == max, so the length stays fixed until both are incremented below
            double savingThreshold = 0.0005;

            for (int i = 1; i < 10000000; i++)
            {
                //var sequence = SequenceGenerator.GenerateSequence(rand.Next(20) + 1, vectorSize);
                var sequence = SequenceGenerator.GenerateSequence(rand.Next(minSeqLen, maxSeqLen), vectorSize);

                Stopwatch stopwatch = new Stopwatch();
                stopwatch.Start();
                double[][] headAddressings;
                double[][] machinesOutput = teacher.TrainVerbose(sequence.Item1, sequence.Item2, out headAddressings);
                stopwatch.Stop();
                times[i % 100] = stopwatch.ElapsedMilliseconds;

                double error        = CalculateLoss(sequence.Item2, machinesOutput);
                double averageError = error / (sequence.Item2.Length * sequence.Item2[0].Length);

                errors[i % 100] = error;

                if (reportStream != null)
                {
                    reportStream.Set("Iteration", i);
                    reportStream.Set("Average data loss", averageError);
                    reportStream.Set("Training time", stopwatch.ElapsedMilliseconds);
                    reportStream.Set("Sequence length", (sequence.Item1.Length - 2) / 2);
                    reportStream.Set("Input", sequence.Item1);
                    reportStream.Set("Known output", sequence.Item2);
                    reportStream.Set("Real output", machinesOutput);
                    reportStream.Set("Head addressings", headAddressings);
                    reportStream.SendData();
                }

                if (i % 100 == 0)
                {
                    double averageError2 = errors.Average();
                    Console.WriteLine(
                        "Iteration: {0}, error: {1}, iterations per second: {2:0.0} MinSeqLen: {3} MaxSeqLen: {4}", i,
                        averageError2, 1000 / times.Average(), minSeqLen, maxSeqLen);

                    if (averageError2 < savingThreshold)
                    {
                        savingThreshold /= 2;
                        machine.Save("NTM_" + averageError2 + "_" + DateTime.Now.ToString("s").Replace(":", ""));
                        maxSeqLen++;
                        minSeqLen++;
                    }
                }
                if (i % 100000 == 0)
                {
                    machine.Save("NTM_" + i + DateTime.Now.ToString("s").Replace(":", ""));
                }
            }
        }
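The TODO comments in Examples #7 and #9 mention extracting the weight-count calculation; a hedged sketch of such a helper follows, under a hypothetical name, with the formula copied verbatim from those examples.

        // Hypothetical helper (not in the source): the weight-count formula from
        // Examples #7 and #9, extracted as their TODO comments suggest.
        private static int CalculateWeightsCount(int inputSize, int outputSize, int controllerSize, int headsCount, int memoryN, int memoryM)
        {
            int headUnitSize = Head.GetUnitSize(memoryM);

            return (headsCount * memoryN) +
                   (memoryN * memoryM) +
                   (controllerSize * headsCount * memoryM) +
                   (controllerSize * inputSize) +
                   (controllerSize) +
                   (outputSize * (controllerSize + 1)) +
                   (headsCount * headUnitSize * (controllerSize + 1));
        }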
Example #8
        private static void MultipleSimultaniousAvgCopyTasks()
        {
            const int numberOfThreads       = 1;
            const int numberOfParallelTasks = 16;
            bool      end = false;
            BlockingCollection <Tuple <Action <int>, int> > work = new BlockingCollection <Tuple <Action <int>, int> >();

            Thread[] threads = new Thread[numberOfThreads];

            SemaphoreSlim[] semaphores = new SemaphoreSlim[numberOfParallelTasks];

            for (int i = 0; i < numberOfParallelTasks; i++)
            {
                semaphores[i] = new SemaphoreSlim(0);
            }

            for (int i = 0; i < numberOfThreads; i++)
            {
                threads[i] = new Thread(
                    () =>
                {
                    while (!end)
                    {
                        var action = work.Take();
                        action.Item1(action.Item2);
                        semaphores[action.Item2].Release();
                    }
                });
                threads[i].Start();
            }

            double[][]            errorss  = new double[numberOfParallelTasks][];
            long[][]              timess   = new long[numberOfParallelTasks][];
            NeuralTuringMachine[] machines = new NeuralTuringMachine[numberOfParallelTasks];
            BPTTTeacher[]         teachers = new BPTTTeacher[numberOfParallelTasks];

            int weightsCount = 0;

            for (int i = 0; i < numberOfParallelTasks; i++)
            {
                errorss[i] = new double[100];
                timess[i]  = new long[100];
                for (int j = 0; j < 100; j++)
                {
                    errorss[i][j] = 1;
                }
                machines[i] = GetRandomMachine(out weightsCount);
                teachers[i] = GetTeacher(weightsCount, machines[i]);
            }

            int k = 1;

            while (!end)
            {
                Stopwatch stopwatch = new Stopwatch();
                stopwatch.Start();
                for (int i = 0; i < numberOfParallelTasks; i++)
                {
                    var index = i;
                    work.Add(new Tuple <Action <int>, int>(id => Iterate(teachers[index], errorss[index], timess[index], id), index));
                }

                for (int i = 0; i < numberOfParallelTasks; i++)
                {
                    semaphores[i].Wait();
                }

                Console.WriteLine("Average NTMs");

                double[] errors = errorss.Select(doubles => doubles.Average()).ToArray();
                AverageMachineWeightUpdater averageWeightUpdater = new AverageMachineWeightUpdater(weightsCount, machines);

                foreach (NeuralTuringMachine machine in machines)
                {
                    machine.UpdateWeights(averageWeightUpdater);
                    averageWeightUpdater.Reset();
                }

                for (int i = 0; i < numberOfParallelTasks; i++)
                {
                    teachers[i] = GetTeacher(weightsCount, machines[i]);
                }

                Console.WriteLine("Iteration: {0}", k);
                Console.WriteLine("Average error: {0}", errors.Average());
                Console.WriteLine("Best error: {0}", errors.Min());
                k++;

                stopwatch.Stop();
                double seconds = stopwatch.ElapsedMilliseconds / (double)1000;
                Console.WriteLine("Time: {0}[s] per task: {1}[s]", seconds, seconds / numberOfParallelTasks);
            }
        }
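Note that end is never set to true in Example #8, so the worker threads block in Take() indefinitely. A minimal shutdown-friendly sketch of the worker loop, using a timed TryTake so the flag is re-checked periodically; setting end = true and joining the threads then stops the pool cleanly.

                threads[i] = new Thread(
                    () =>
                {
                    // Sketch only: timed TryTake instead of the blocking Take() above,
                    // so the loop can observe the end flag and exit.
                    while (!end)
                    {
                        Tuple<Action<int>, int> action;
                        if (work.TryTake(out action, TimeSpan.FromMilliseconds(100)))
                        {
                            action.Item1(action.Item2);
                            semaphores[action.Item2].Release();
                        }
                    }
                });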
Example #9
        static void Main()
        {
            DataStream reportStream = null;
            try
            {
                YoVisionClientHelper yoVisionClientHelper = new YoVisionClientHelper();
                yoVisionClientHelper.Connect(EndpointType.NetTcp, 8081, "localhost", "YoVisionServer");
                reportStream = yoVisionClientHelper.RegisterDataStream("NGram task training",
                    new Int32DataType("Iteration"),
                    new DoubleDataType("Average data loss"),
                    new Double2DArrayType("Input"),
                    new Double2DArrayType("Known output"),
                    new Double2DArrayType("Real output"),
                    new Double2DArrayType("Head addressings"));
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }

            const int controllerSize = 100;
            const int headsCount = 1;
            const int memoryN = 128;
            const int memoryM = 20;
            const int inputSize = 1;
            const int outputSize = 1;

            Random rand = new Random(42);
            NeuralTuringMachine machine = new NeuralTuringMachine(inputSize, outputSize, controllerSize, headsCount, memoryN, memoryM, new RandomWeightInitializer(rand));

            int headUnitSize = Head.GetUnitSize(memoryM);

             var weightsCount =
                (headsCount * memoryN) +
                (memoryN * memoryM) +
                (controllerSize * headsCount * memoryM) +
                (controllerSize * inputSize) +
                (controllerSize) +
                (outputSize * (controllerSize + 1)) +
                (headsCount * headUnitSize * (controllerSize + 1));

            Console.WriteLine(weightsCount);

            RMSPropWeightUpdater rmsPropWeightUpdater = new RMSPropWeightUpdater(weightsCount, 0.95, 0.5, 0.001, 0.001);

            BPTTTeacher teacher = new BPTTTeacher(machine, rmsPropWeightUpdater);

            long[] times = new long[100];

            for (int i = 1; i < 10000000; i++)
            {
                Tuple<double[][], double[][]> data = SequenceGenerator.GenerateSequence(SequenceGenerator.GeneratePropabilities());

                Stopwatch stopwatch = new Stopwatch();
                stopwatch.Start();
                double[][] headAddressings;
                double[][] output = teacher.TrainVerbose(data.Item1, data.Item2, out headAddressings);
                stopwatch.Stop();
                times[i % 100] = stopwatch.ElapsedMilliseconds;
                
                if (i%10 == 0)
                {
                    double loss = CalculateLogLoss(output, data.Item2);
                    if (reportStream != null)
                    {
                        reportStream.Set("Iteration", i);
                        reportStream.Set("Average data loss", loss);
                        reportStream.Set("Input", data.Item1);
                        reportStream.Set("Known output", data.Item2);
                        reportStream.Set("Real output", output);
                        reportStream.Set("Head addressings", headAddressings);
                        reportStream.SendData();
                    }
                }

                if (i%100 == 0)
                {
                    Console.WriteLine("Iteration: {0}, iterations per second: {1:0.0}", i, 1000 / times.Average());
                }


                if (i%1000 == 0)
                {
                    double[] props = SequenceGenerator.GeneratePropabilities();
                    
                    const int sampleCount = 100;

                    double[] losses = new double[sampleCount];

                    for (int j = 0; j < sampleCount; j++)
                    {
                        Tuple<double[][], double[][]> sequence = SequenceGenerator.GenerateSequence(props);
                        var machineOutput = teacher.Train(sequence.Item1, sequence.Item2);
                        double[][] knownOutput = sequence.Item2;
                        double loss = CalculateLogLoss(machineOutput, knownOutput);
                        losses[j] = -loss;
                    }

                    Console.WriteLine("Loss [bits per sequence]: {0}", losses.Average());
                }

                if (i % 1000 == 0)
                {
                    machine.Save("NTM_" + i + DateTime.Now.ToString("s").Replace(":", ""));
                }
            }
        }
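The CalculateLogLoss helper is not shown in the source; purely for orientation, a hypothetical sketch of a base-2 cross-entropy that would make the negated loss the cost in bits per sequence, as the console message above suggests.

        // Hypothetical sketch only; the actual CalculateLogLoss is not shown in the source
        // and may differ. Sums base-2 log-likelihood, so negating it gives bits per sequence.
        private static double CalculateLogLossSketch(double[][] predicted, double[][] known)
        {
            double sum = 0.0;
            for (int t = 0; t < known.Length; t++)
            {
                for (int k = 0; k < known[t].Length; k++)
                {
                    double y = known[t][k];                                            // target bit
                    double p = Math.Min(Math.Max(predicted[t][k], 1e-12), 1 - 1e-12);  // clamp to avoid log(0)
                    sum += y * Math.Log(p, 2) + (1 - y) * Math.Log(1 - p, 2);
                }
            }
            return sum; // <= 0
        }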