Example No. 1
        private void EmitStloc(FunctionStack stack, List<StackValue> locals, int localIndex)
        {
            var value = stack.Pop();
            var local = locals[localIndex];

            // Convert from stack to local value
            var stackValue = ConvertFromStackToLocal(local.Type, value);

            // Store value into local
            StoreValue(local.Type.StackType, stackValue, local.Value, InstructionFlags.None);
        }
Example No. 2
        private void EmitLdloc(FunctionStack stack, List<StackValue> locals, int operandIndex)
        {
            var local = locals[operandIndex];

            // Load value from local
            var value = LoadValue(local.StackType, local.Value, InstructionFlags.None);

            // Convert from local to stack value
            value = ConvertFromLocalToStack(local.Type, value);

            // Add value to stack
            stack.Add(new StackValue(local.StackType, local.Type, value));
        }
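These two handlers mirror each other: stloc converts the popped value to the local's representation and stores it, while ldloc loads and converts back. The opcode-decoding driver loop is not part of these examples; a hypothetical dispatch sketch, assuming Mono.Cecil-style instructions (Instruction, Code, VariableDefinition):

        // Hypothetical sketch only; the real driver loop is not shown in these examples.
        switch (instruction.OpCode.Code)
        {
            case Code.Stloc:
                EmitStloc(stack, locals, ((VariableDefinition)instruction.Operand).Index);
                break;
            case Code.Ldloc:
                EmitLdloc(stack, locals, ((VariableDefinition)instruction.Operand).Index);
                break;
        }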
Example No. 3
        private void EmitLdloca(FunctionStack stack, List<StackValue> locals, int operandIndex)
        {
            var local = locals[operandIndex];

            var refType = GetType(local.Type.TypeReferenceCecil.MakeByReferenceType(), TypeState.Opaque);

            // Convert from local to stack value
            var value = ConvertFromLocalToStack(refType, local.Value);

            // Add value to stack
            // TODO: Choose appropriate type + conversions
            stack.Add(new StackValue(StackValueType.Reference, refType, value));
        }
Example No. 4
        private void EmitStfld(FunctionStack stack, Field field, InstructionFlags instructionFlags)
        {
            var value = stack.Pop();
            var @object = stack.Pop();

            // Compute field address
            var fieldAddress = ComputeFieldAddress(builder, field, @object.StackType, @object.Value, ref instructionFlags);

            // Convert stack value to appropriate type
            var fieldValue = ConvertFromStackToLocal(field.Type, value);

            // Store value in field
            StoreValue(field.Type.StackType, fieldValue, fieldAddress, instructionFlags);
        }
Example No. 5
        private void EmitLdflda(FunctionStack stack, Field field)
        {
            var @object = stack.Pop();

            var refType = GetType(field.Type.TypeReferenceCecil.MakeByReferenceType(), TypeState.Opaque);

            // Compute field address
            var instructionFlags = InstructionFlags.None;
            var fieldAddress = ComputeFieldAddress(builder, field, @object.StackType, @object.Value, ref instructionFlags);

            // Add value to stack
            stack.Add(new StackValue(StackValueType.Reference, refType, fieldAddress));
        }
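The matching field load (ldfld) is not among these examples; by symmetry with EmitStfld and EmitLdloc it would presumably combine ComputeFieldAddress with LoadValue and ConvertFromLocalToStack. A minimal sketch under that assumption:

        private void EmitLdfld(FunctionStack stack, Field field, InstructionFlags instructionFlags)
        {
            var @object = stack.Pop();

            // Compute field address
            var fieldAddress = ComputeFieldAddress(builder, field, @object.StackType, @object.Value, ref instructionFlags);

            // Load value from field and convert to stack representation
            var value = LoadValue(field.Type.StackType, fieldAddress, instructionFlags);
            value = ConvertFromLocalToStack(field.Type, value);

            // Add value to stack
            stack.Add(new StackValue(field.Type.StackType, field.Type, value));
        }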
Example No. 6
 private void EmitLdnull(FunctionStack stack)
 {
     // Add null object reference to stack
     stack.Add(new StackValue(StackValueType.Object, @object, LLVM.ConstNull(@object.DefaultTypeLLVM)));
 }
Example No. 7
 private void EmitBrfalse(FunctionStack stack, BasicBlockRef targetBasicBlock, BasicBlockRef nextBasicBlock)
 {
     // Stack element should be equal to zero.
     EmitBrCommon(stack.Pop(), IntPredicate.IntEQ, targetBasicBlock, nextBasicBlock);
 }
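The dual opcode, brtrue, branches when the popped value is non-zero; assuming the same EmitBrCommon helper, a minimal sketch:

 private void EmitBrtrue(FunctionStack stack, BasicBlockRef targetBasicBlock, BasicBlockRef nextBasicBlock)
 {
     // Stack element should be different from zero.
     EmitBrCommon(stack.Pop(), IntPredicate.IntNE, targetBasicBlock, nextBasicBlock);
 }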
Example No. 8
        public static void Run()
        {
            Console.WriteLine("Build Vocabulary.");

            Vocabulary vocabulary = new Vocabulary();

            string trainPath = InternetFileDownloader.Download(DOWNLOAD_URL + TRAIN_FILE, TRAIN_FILE);
            string validPath = InternetFileDownloader.Download(DOWNLOAD_URL + VALID_FILE, VALID_FILE);
            string testPath  = InternetFileDownloader.Download(DOWNLOAD_URL + TEST_FILE, TEST_FILE);

            int[] trainData = vocabulary.LoadData(trainPath);
            int[] validData = vocabulary.LoadData(validPath);
            int[] testData  = vocabulary.LoadData(testPath);

            int nVocab = vocabulary.Length;

            Console.WriteLine("Network Initilizing.");
            FunctionStack model = new FunctionStack(
                new EmbedID(nVocab, N_UNITS, name: "l1 EmbedID"),
                new Dropout(),
                new LSTM(N_UNITS, N_UNITS, name: "l2 LSTM"),
                new Dropout(),
                new LSTM(N_UNITS, N_UNITS, name: "l3 LSTM"),
                new Dropout(),
                new Linear(N_UNITS, nVocab, name: "l4 Linear")
                );

            //Instead of clamping at the given threshold, a correction rate is derived from the L2 norm of all parameters
            GradientClipping gradientClipping = new GradientClipping(threshold: GRAD_CLIP);
            SGD sgd = new SGD(learningRate: 1);

            model.SetOptimizer(gradientClipping, sgd);

            Real wholeLen = trainData.Length;
            int  jump     = (int)Math.Floor(wholeLen / BATCH_SIZE);
            int  epoch    = 0;

            Stack <NdArray[]> backNdArrays = new Stack <NdArray[]>();

            Console.WriteLine("Train Start.");

            for (int i = 0; i < jump * N_EPOCH; i++)
            {
                NdArray x = new NdArray(new[] { 1 }, BATCH_SIZE);
                NdArray t = new NdArray(new[] { 1 }, BATCH_SIZE);

                for (int j = 0; j < BATCH_SIZE; j++)
                {
                    x.Data[j] = trainData[(int)((jump * j + i) % wholeLen)];
                    t.Data[j] = trainData[(int)((jump * j + i + 1) % wholeLen)];
                }

                NdArray[] result  = model.Forward(x);
                Real      sumLoss = new SoftmaxCrossEntropy().Evaluate(result, t);
                backNdArrays.Push(result);
                Console.WriteLine("[{0}/{1}] Loss: {2}", i + 1, jump, sumLoss);

                //Run truncated BPTT
                if ((i + 1) % BPROP_LEN == 0)
                {
                    for (int j = 0; backNdArrays.Count > 0; j++)
                    {
                        Console.WriteLine("backward" + backNdArrays.Count);
                        model.Backward(backNdArrays.Pop());
                    }

                    model.Update();
                    model.ResetState();
                }

                if ((i + 1) % jump == 0)
                {
                    epoch++;
                    Console.WriteLine("evaluate");
                    Console.WriteLine("validation perplexity: {0}", Evaluate(model, validData));

                    if (epoch >= 6)
                    {
                        sgd.LearningRate /= 1.2;
                        Console.WriteLine("learning rate =" + sgd.LearningRate);
                    }
                }
            }

            Console.WriteLine("test start");
            Console.WriteLine("test perplexity:" + Evaluate(model, testData));
        }
Example No. 9
 private void EmitR4(FunctionStack stack, float operandIndex)
 {
     // Add constant float value to stack
     stack.Add(new StackValue(StackValueType.Float, @float,
         LLVM.ConstReal(@float.DataTypeLLVM, operandIndex)));
 }
Example No. 10
        public static void CreateFunctionTree(string[] postfixArray, FunctionTree FTree)
        {
            FunctionStack fStk = new FunctionStack();
            string        f;
            int           i = 0;

            FTree.varList = new List <FVar>();

            while (i < postfixArray.Length)
            {
                f = postfixArray[i];

                if (f.Equals(FNeg.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FNeg.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FNeg(fStk.TopAndPop()));
                }
                else if (f.Equals(FAdd.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FAdd.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FAdd(fStk.TopAndPop(), fStk.TopAndPop(), 1));
                }
                else if (f.Equals(FSub.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FSub.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FSub(fStk.TopAndPop(), fStk.TopAndPop(), 1));
                }
                else if (f.Equals(FMul.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FMul.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FMul(fStk.TopAndPop(), fStk.TopAndPop(), 1));
                }
                else if (f.Equals(FDiv.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FDiv.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FDiv(fStk.TopAndPop(), fStk.TopAndPop(), 1));
                }
                else if (f.Equals(FPow.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FPow.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FPow(fStk.TopAndPop(), fStk.TopAndPop(), 1));
                }
                else if (f.Equals(FExp.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FExp.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FExp(fStk.TopAndPop()));
                }
                else if (f.Equals(FLn.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FLn.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FLn(fStk.TopAndPop()));
                }
                // sin()
                else if (f.Equals(FSin.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FSin.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FSin(fStk.TopAndPop()));
                }
                else if (f.Equals(FCos.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FCos.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FCos(fStk.TopAndPop()));
                }

                // add rest of the functions

                else if (IsConstant(f))                 // double number
                {
                    fStk.Push(new FCons(Convert.ToDouble(f)));
                }
                else
                {
                    FVar @var = FTree.FindVariable(f);
                    if (@var != null)
                    {
                        fStk.Push(@var);
                    }
                    else
                    {
                        fStk.Push(FTree.AddVariable(new FVar(f)));                         // variable
                    }
                }

                i++;
            }



            FTree.rootNode = fStk.TopAndPop();
        }
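For context, a hypothetical call (assuming FMul.Symbol is "*" and FSin.ID is "sin"; the F* classes themselves are not shown here). The postfix input "x x * sin" corresponds to sin(x * x) in infix:

            FunctionTree tree = new FunctionTree();
            CreateFunctionTree(new[] { "x", "x", "*", "sin" }, tree);
            // tree.rootNode now holds the FSin node; tree.varList contains the single variable x.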
Example No. 11
        private void EmitLdelema(FunctionStack stack, Type elementType)
        {
            var index = stack.Pop();
            var array = stack.Pop();

            // Force array type to be emitted
            GetType(array.Type.TypeReferenceCecil, TypeState.VTableEmitted);

            var indexValue = ConvertToNativeInt(index);

            var refType = GetType(elementType.TypeReferenceCecil.MakeByReferenceType(), TypeState.Opaque);

            // Load array data pointer
            var arrayFirstElement = LoadArrayDataPointer(array);

            // Find pointer of element at requested index
            var arrayElementPointer = LLVM.BuildGEP(builder, arrayFirstElement, new[] { indexValue }, string.Empty);

            // Convert
            arrayElementPointer = ConvertFromLocalToStack(refType, arrayElementPointer);

            // Push loaded element address onto the stack
            stack.Add(new StackValue(refType.StackType, refType, arrayElementPointer));
        }
Example No. 12
        const int N = 30; //Runs with 1000 as in the reference link, but that is slow on the CPU

        public static void Run()
        {
            Console.WriteLine("MNIST Data Loading...");
            MnistData mnistData = new MnistData(28);

            Console.WriteLine("Training Start...");

            //Write the network configuration into a FunctionStack
            FunctionStack nn = new FunctionStack(
                new Linear(28 * 28, N, name: "l1 Linear"), // L1
                new BatchNormalization(N, name: "l1 BatchNorm"),
                new ReLU(name: "l1 ReLU"),
                new Linear(N, N, name: "l2 Linear"), // L2
                new BatchNormalization(N, name: "l2 BatchNorm"),
                new ReLU(name: "l2 ReLU"),
                new Linear(N, N, name: "l3 Linear"), // L3
                new BatchNormalization(N, name: "l3 BatchNorm"),
                new ReLU(name: "l3 ReLU"),
                new Linear(N, N, name: "l4 Linear"), // L4
                new BatchNormalization(N, name: "l4 BatchNorm"),
                new ReLU(name: "l4 ReLU"),
                new Linear(N, N, name: "l5 Linear"), // L5
                new BatchNormalization(N, name: "l5 BatchNorm"),
                new ReLU(name: "l5 ReLU"),
                new Linear(N, N, name: "l6 Linear"), // L6
                new BatchNormalization(N, name: "l6 BatchNorm"),
                new ReLU(name: "l6 ReLU"),
                new Linear(N, N, name: "l7 Linear"), // L7
                new BatchNormalization(N, name: "l7 BatchNorm"),
                new ReLU(name: "l7 ReLU"),
                new Linear(N, N, name: "l8 Linear"), // L8
                new BatchNormalization(N, name: "l8 BatchNorm"),
                new ReLU(name: "l8 ReLU"),
                new Linear(N, N, name: "l9 Linear"), // L9
                new BatchNormalization(N, name: "l9 BatchNorm"),
                new ReLU(name: "l9 ReLU"),
                new Linear(N, N, name: "l10 Linear"), // L10
                new BatchNormalization(N, name: "l10 BatchNorm"),
                new ReLU(name: "l10 ReLU"),
                new Linear(N, N, name: "l11 Linear"), // L11
                new BatchNormalization(N, name: "l11 BatchNorm"),
                new ReLU(name: "l11 ReLU"),
                new Linear(N, N, name: "l12 Linear"), // L12
                new BatchNormalization(N, name: "l12 BatchNorm"),
                new ReLU(name: "l12 ReLU"),
                new Linear(N, N, name: "l13 Linear"), // L13
                new BatchNormalization(N, name: "l13 BatchNorm"),
                new ReLU(name: "l13 ReLU"),
                new Linear(N, N, name: "l14 Linear"), // L14
                new BatchNormalization(N, name: "l14 BatchNorm"),
                new ReLU(name: "l14 ReLU"),
                new Linear(N, 10, name: "l15 Linear") // L15
                );

            nn.SetOptimizer(new AdaGrad());


            for (int epoch = 0; epoch < 3; epoch++)
            {
                Console.WriteLine("epoch " + (epoch + 1));

                //List<Real> totalLoss = new List<Real>();
                Real totalLoss        = 0;
                long totalLossCounter = 0;

                //Run the batch
                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    //Get data randomly from training data
                    TestDataSet datasetX = mnistData.GetRandomXSet(BATCH_DATA_COUNT, 28, 28);

                    //Learn
                    Real sumLoss = Trainer.Train(nn, datasetX.Data, datasetX.Label, new SoftmaxCrossEntropy());
                    totalLoss += sumLoss;
                    totalLossCounter++;

                    if (i % 20 == 0)
                    {
                        Console.WriteLine("\nbatch count " + i + "/" + TRAIN_DATA_COUNT);
                        Console.WriteLine("total loss " + totalLoss / totalLossCounter);
                        Console.WriteLine("local loss " + sumLoss);
                        Console.WriteLine("");
                        Console.WriteLine("Testing...");

                        //Get data randomly from test data
                        TestDataSet datasetY = mnistData.GetRandomYSet(TEST_DATA_COUNT, 28);

                        //Run the test
                        Real accuracy = Trainer.Accuracy(nn, datasetY.Data, datasetY.Label);
                        Console.WriteLine("accuracy " + accuracy);
                    }
                }
            }
        }
Example No. 13
        public static void Run()
        {
            //Number of training iterations
            const int learningCount = 10000;

            //Training data
            Real[][] trainData =
            {
                new Real[] { 0, 0 },
                new Real[] { 1, 0 },
                new Real[] { 0, 1 },
                new Real[] { 1, 1 }
            };

            //Training data label
            Real[][] trainLabel =
            {
                new Real[] { 0 },
                new Real[] { 1 },
                new Real[] { 1 },
                new Real[] { 0 }
            };

            //Network configuration is written in FunctionStack
            FunctionStack nn = new FunctionStack(
                new Linear(2, 2, name: "l1 Linear"),
                new Sigmoid(name: "l1 Sigmoid"),
                new Linear(2, 2, name: "l2 Linear")
                );

            nn.SetOptimizer(new MomentumSGD());

            //Training loop
            Console.WriteLine("Training...");
            for (int i = 0; i < learningCount; i++)
            {
                for (int j = 0; j < trainData.Length; j++)
                {
                    //Specify the loss function when running training
                    Trainer.Train(nn, trainData[j], trainLabel[j], new SoftmaxCrossEntropy());
                }
            }

            //Show training results
            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = nn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }

            //Save network after learning
            Console.WriteLine("Model Saveing...");
            ModelIO.Save(nn, "test.nn");

            //Load the network after learning
            Console.WriteLine("Model Loading...");
            FunctionStack testnn = ModelIO.Load("test.nn");

            Console.WriteLine("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = testnn.Predict(input)[0];
                int     resultIndex = Array.IndexOf(result.Data, result.Data.Max());
                Console.WriteLine(input[0] + " xor " + input[1] + " = " + resultIndex + " " + result);
            }
        }
Example No. 14
        public static void Run()
        {
            const int learningCount = 10000;

            Real[][] trainData =
            {
                new Real[] { 0, 0 },
                new Real[] { 1, 0 },
                new Real[] { 0, 1 },
                new Real[] { 1, 1 }
            };

            Real[][] trainLabel =
            {
                new Real[] { 0 },
                new Real[] { 1 },
                new Real[] { 1 },
                new Real[] { 0 }
            };

            bool verbose = true;

            FunctionStack nn = new FunctionStack("Test1",
                                                 new Linear(verbose, 2, 2, name: "l1 Linear"),
                                                 new Sigmoid(name: "l1 Sigmoid"),
                                                 new Linear(verbose, 2, 2, name: "l2 Linear"));

            nn.SetOptimizer(new MomentumSGD());

            Info("Training...");
            for (int i = 0; i < learningCount; i++)
            {
                for (int j = 0; j < trainData.Length; j++)
                {
                    Trainer.Train(nn, trainData[j], trainLabel[j], new SoftmaxCrossEntropy());
                }
            }

            Info("Test Start...");

            foreach (Real[] input in trainData)
            {
                NdArray result      = nn.Predict(true, input)?[0];
                int     resultIndex = Array.IndexOf(result?.Data, result.Data.Max());
                Info($"{input[0]} xor {input[1]} = {resultIndex} {result}");
            }

            Info("Saving Model...");
            ModelIO.Save(nn, "test.nn");

            Info("Loading Model...");
            FunctionStack testnn = ModelIO.Load("test.nn");

            Info(testnn.Describe());

            Info("Test Start...");
            foreach (Real[] input in trainData)
            {
                NdArray result      = testnn?.Predict(true, input)?[0];
                int     resultIndex = Array.IndexOf(result?.Data, result?.Data.Max());
                Info($"{input[0]} xor {input[1]} = {resultIndex} {result}");
            }
        }
Example No. 15
        public static void Run()
        {
            //Prepare the MNIST data
            Console.WriteLine("MNIST Data Loading...");
            MnistData mnistData = new MnistData();


            Console.WriteLine("Training Start...");

            //Write the network configuration into a FunctionStack
            FunctionStack Layer1 = new FunctionStack(
                new Linear(28 * 28, 256, name: "l1 Linear"),
                new BatchNormalization(256, name: "l1 Norm"),
                new ReLU(name: "l1 ReLU")
                );

            FunctionStack Layer2 = new FunctionStack(
                new Linear(256, 256, name: "l2 Linear"),
                new BatchNormalization(256, name: "l2 Norm"),
                new ReLU(name: "l2 ReLU")
                );

            FunctionStack Layer3 = new FunctionStack(
                new Linear(256, 256, name: "l3 Linear"),
                new BatchNormalization(256, name: "l3 Norm"),
                new ReLU(name: "l3 ReLU")
                );

            FunctionStack Layer4 = new FunctionStack(
                new Linear(256, 10, name: "l4 Linear")
                );

            //A FunctionStack can itself be stacked as a Function
            FunctionStack nn = new FunctionStack
                               (
                Layer1,
                Layer2,
                Layer3,
                Layer4
                               );

            FunctionStack DNI1 = new FunctionStack(
                new Linear(256, 1024, name: "DNI1 Linear1"),
                new BatchNormalization(1024, name: "DNI1 Norm1"),
                new ReLU(name: "DNI1 ReLU1"),
                new Linear(1024, 1024, name: "DNI1 Linear2"),
                new BatchNormalization(1024, name: "DNI1 Norm2"),
                new ReLU(name: "DNI1 ReLU2"),
                new Linear(1024, 256, initialW: new Real[1024, 256], name: "DNI1 Linear3")
                );

            FunctionStack DNI2 = new FunctionStack(
                new Linear(256, 1024, name: "DNI2 Linear1"),
                new BatchNormalization(1024, name: "DNI2 Norm1"),
                new ReLU(name: "DNI2 ReLU1"),
                new Linear(1024, 1024, name: "DNI2 Linear2"),
                new BatchNormalization(1024, name: "DNI2 Norm2"),
                new ReLU(name: "DNI2 ReLU2"),
                new Linear(1024, 256, initialW: new Real[1024, 256], name: "DNI2 Linear3")
                );

            FunctionStack DNI3 = new FunctionStack(
                new Linear(256, 1024, name: "DNI3 Linear1"),
                new BatchNormalization(1024, name: "DNI3 Norm1"),
                new ReLU(name: "DNI3 ReLU1"),
                new Linear(1024, 1024, name: "DNI3 Linear2"),
                new BatchNormalization(1024, name: "DNI3 Norm2"),
                new ReLU(name: "DNI3 ReLU2"),
                new Linear(1024, 256, initialW: new Real[1024, 256], name: "DNI3 Linear3")
                );

            //Declare the optimizers
            Layer1.SetOptimizer(new Adam());
            Layer2.SetOptimizer(new Adam());
            Layer3.SetOptimizer(new Adam());
            Layer4.SetOptimizer(new Adam());

            DNI1.SetOptimizer(new Adam());
            DNI2.SetOptimizer(new Adam());
            DNI3.SetOptimizer(new Adam());

            //Training loop
            for (int epoch = 0; epoch < 20; epoch++)
            {
                Console.WriteLine("epoch " + (epoch + 1));

                Real totalLoss     = 0;
                Real DNI1totalLoss = 0;
                Real DNI2totalLoss = 0;
                Real DNI3totalLoss = 0;

                long totalLossCount     = 0;
                long DNI1totalLossCount = 0;
                long DNI2totalLossCount = 0;
                long DNI3totalLossCount = 0;

                //How many batches to run
                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    //Get random data from the training set
                    TestDataSet datasetX = mnistData.GetRandomXSet(BATCH_DATA_COUNT);

                    //Run the first layer
                    NdArray[] layer1ForwardResult = Layer1.Forward(datasetX.Data);

                    //Get the gradient for the first layer from its DNI
                    NdArray[] DNI1Result = DNI1.Forward(layer1ForwardResult);

                    //Apply the gradient to the first layer
                    layer1ForwardResult[0].Grad = DNI1Result[0].Data.ToArray();

                    //Update the first layer
                    Layer1.Backward(layer1ForwardResult);
                    layer1ForwardResult[0].ParentFunc = null; //Backward has run, so detach the computation graph
                    Layer1.Update();

                    //Run the second layer
                    NdArray[] layer2ForwardResult = Layer2.Forward(layer1ForwardResult);

                    //Get the gradient for the second layer from its DNI
                    NdArray[] DNI2Result = DNI2.Forward(layer2ForwardResult);

                    //Apply the gradient to the second layer
                    layer2ForwardResult[0].Grad = DNI2Result[0].Data.ToArray();

                    //Update the second layer
                    Layer2.Backward(layer2ForwardResult);
                    layer2ForwardResult[0].ParentFunc = null;

                    //Train the DNI for the first layer
                    Real DNI1loss = new MeanSquaredError().Evaluate(DNI1Result, new NdArray(layer1ForwardResult[0].Grad, DNI1Result[0].Shape, DNI1Result[0].BatchCount));

                    Layer2.Update();

                    DNI1.Backward(DNI1Result);
                    DNI1.Update();

                    DNI1totalLoss += DNI1loss;
                    DNI1totalLossCount++;

                    //Run the third layer
                    NdArray[] layer3ForwardResult = Layer3.Forward(layer2ForwardResult);

                    //Get the gradient for the third layer from its DNI
                    NdArray[] DNI3Result = DNI3.Forward(layer3ForwardResult);

                    //Apply the gradient to the third layer
                    layer3ForwardResult[0].Grad = DNI3Result[0].Data.ToArray();

                    //Update the third layer
                    Layer3.Backward(layer3ForwardResult);
                    layer3ForwardResult[0].ParentFunc = null;

                    //Train the DNI for the second layer
                    Real DNI2loss = new MeanSquaredError().Evaluate(DNI2Result, new NdArray(layer2ForwardResult[0].Grad, DNI2Result[0].Shape, DNI2Result[0].BatchCount));

                    Layer3.Update();

                    DNI2.Backward(DNI2Result);
                    DNI2.Update();

                    DNI2totalLoss += DNI2loss;
                    DNI2totalLossCount++;

                    //Run the fourth layer
                    NdArray[] layer4ForwardResult = Layer4.Forward(layer3ForwardResult);

                    //Get the loss for the fourth layer
                    Real sumLoss = new SoftmaxCrossEntropy().Evaluate(layer4ForwardResult, datasetX.Label);

                    //Update the fourth layer
                    Layer4.Backward(layer4ForwardResult);
                    layer4ForwardResult[0].ParentFunc = null;

                    totalLoss += sumLoss;
                    totalLossCount++;

                    //Train the DNI for the third layer
                    Real DNI3loss = new MeanSquaredError().Evaluate(DNI3Result, new NdArray(layer3ForwardResult[0].Grad, DNI3Result[0].Shape, DNI3Result[0].BatchCount));

                    Layer4.Update();

                    DNI3.Backward(DNI3Result);
                    DNI3.Update();

                    DNI3totalLoss += DNI3loss;
                    DNI3totalLossCount++;

                    Console.WriteLine("\nbatch count " + i + "/" + TRAIN_DATA_COUNT);
                    //Output results
                    Console.WriteLine("total loss " + totalLoss / totalLossCount);
                    Console.WriteLine("local loss " + sumLoss);

                    Console.WriteLine("\nDNI1 total loss " + DNI1totalLoss / DNI1totalLossCount);
                    Console.WriteLine("DNI2 total loss " + DNI2totalLoss / DNI2totalLossCount);
                    Console.WriteLine("DNI3 total loss " + DNI3totalLoss / DNI3totalLossCount);

                    Console.WriteLine("\nDNI1 local loss " + DNI1loss);
                    Console.WriteLine("DNI2 local loss " + DNI2loss);
                    Console.WriteLine("DNI3 local loss " + DNI3loss);

                    //Test accuracy after every 20 batches
                    if (i % 20 == 0)
                    {
                        Console.WriteLine("\nTesting...");

                        //Get random data from the test set
                        TestDataSet datasetY = mnistData.GetRandomYSet(TEST_DATA_COUNT);

                        //Run the test
                        Real accuracy = Trainer.Accuracy(nn, datasetY.Data, datasetY.Label);
                        Console.WriteLine("accuracy " + accuracy);
                    }
                }
            }
        }
Example No. 16
        public static void Run()
        {
            Console.WriteLine("Build Vocabulary.");

            Vocabulary vocabulary = new Vocabulary();
            string     trainPath  = InternetFileDownloader.Donwload(DOWNLOAD_URL + TRAIN_FILE, TRAIN_FILE, TRAIN_FILE_HASH);
            string     testPath   = InternetFileDownloader.Donwload(DOWNLOAD_URL + TEST_FILE, TEST_FILE, TEST_FILE_HASH);

            int[] trainData = vocabulary.LoadData(trainPath);
            int[] testData  = vocabulary.LoadData(testPath);

            int nVocab = vocabulary.Length;

            Console.WriteLine("Done.");

            Console.WriteLine("Network Initilizing.");
            FunctionStack model = new FunctionStack(
                new EmbedID(nVocab, N_UNITS, name: "l1 EmbedID"),
                new Linear(N_UNITS, N_UNITS, name: "l2 Linear"),
                new TanhActivation("l2 Tanh"),
                new Linear(N_UNITS, nVocab, name: "l3 Linear"),
                new Softmax("l3 Softmax")
                );

            model.SetOptimizer(new Adam());

            List <int> s = new List <int>();

            Console.WriteLine("Train Start.");
            SoftmaxCrossEntropy softmaxCrossEntropy = new SoftmaxCrossEntropy();

            for (int epoch = 0; epoch < TRAINING_EPOCHS; epoch++)
            {
                for (int pos = 0; pos < trainData.Length; pos++)
                {
                    NdArray h = new NdArray(new Real[N_UNITS]);

                    int id = trainData[pos];
                    s.Add(id);

                    if (id == vocabulary.EosID)
                    {
                        Real            accumloss = 0;
                        Stack <NdArray> tmp       = new Stack <NdArray>();

                        for (int i = 0; i < s.Count; i++)
                        {
                            int tx = i == s.Count - 1 ? vocabulary.EosID : s[i + 1];

                            //l1 EmbedID
                            NdArray l1 = model.Functions[0].Forward(s[i])[0];

                            //l2 Linear
                            NdArray l2 = model.Functions[1].Forward(h)[0];

                            //Add
                            NdArray xK = l1 + l2;

                            //l2 Tanh
                            h = model.Functions[2].Forward(xK)[0];

                            //l3 Linear
                            NdArray h2 = model.Functions[3].Forward(h)[0];

                            Real loss = softmaxCrossEntropy.Evaluate(h2, tx);
                            tmp.Push(h2);
                            accumloss += loss;
                        }

                        Console.WriteLine(accumloss);

                        for (int i = 0; i < s.Count; i++)
                        {
                            model.Backward(tmp.Pop());
                        }

                        model.Update();
                        s.Clear();
                    }

                    if (pos % 100 == 0)
                    {
                        Console.WriteLine(pos + "/" + trainData.Length + " finished");
                    }
                }
            }

            Console.WriteLine("Test Start.");

            Real       sum     = 0;
            int        wnum    = 0;
            List <int> ts      = new List <int>();
            bool       unkWord = false;

            for (int pos = 0; pos < 1000; pos++)
            {
                int id = testData[pos];
                ts.Add(id);

                if (id > trainData.Length)
                {
                    unkWord = true;
                }

                if (id == vocabulary.EosID)
                {
                    if (!unkWord)
                    {
                        Console.WriteLine("pos" + pos);
                        Console.WriteLine("tsLen" + ts.Count);
                        Console.WriteLine("sum" + sum);
                        Console.WriteLine("wnum" + wnum);

                        sum  += CalPs(model, ts);
                        wnum += ts.Count - 1;
                    }
                    else
                    {
                        unkWord = false;
                    }

                    ts.Clear();
                }
            }

            Console.WriteLine(Math.Pow(2.0, sum / wnum));
        }
Example No. 17
        public static void Run()
        {
            //Write the configuration of the network to load into a FunctionStack, matching each Function's parameters
            //Each name must match the corresponding Chainer variable name

            FunctionStack nn = new FunctionStack(
                new Convolution2D(1, 2, 3, name: "conv1", gpuEnable: true),//don't forget the GPU flag if needed
                new ReLU(),
                new MaxPooling2D(2, 2),
                new Convolution2D(2, 2, 2, name: "conv2", gpuEnable: true),
                new ReLU(),
                new MaxPooling2D(2, 2),
                new Linear(8, 2, name: "fl3"),
                new ReLU(),
                new Linear(2, 2, name: "fl4")
                );

            /* Declaration in Chainer
             * class NN(chainer.Chain):
             *  def __init__(self):
             *      super(NN, self).__init__(
             *          conv1 = L.Convolution2D(1,2,3),
             *          conv2 = L.Convolution2D(2,2,2),
             *          fl3 = L.Linear(8,2),
             *          fl4 = L.Linear(2,2)
             *      )
             *
             *  def __call__(self, x):
             *      h_conv1 = F.relu(self.conv1(x))
             *      h_pool1 = F.max_pooling_2d(h_conv1, 2)
             *      h_conv2 = F.relu(self.conv2(h_pool1))
             *      h_pool2 = F.max_pooling_2d(h_conv2, 2)
             *      h_fc1 = F.relu(self.fl3(h_pool2))
             *      y = self.fl4(h_fc1)
             *      return y
             */


            //Load the parameters
            ChainerModelDataLoader.ModelLoad(MODEL_FILE_PATH, nn);

            //From here on, use the network as usual
            nn.SetOptimizer(new SGD(0.1));

            //Input data
            NdArray x = new NdArray(new Real[, , ] {
                {
                    { 0.0, 0.0, 0.0, 0.0, 0.0, 0.2, 0.9, 0.2, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.2, 0.8, 0.9, 0.1, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.1, 0.8, 0.5, 0.8, 0.1, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.3, 0.3, 0.1, 0.7, 0.2, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.1, 0.0, 0.1, 0.7, 0.2, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.0, 0.1, 0.7, 0.1, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.0, 0.4, 0.8, 0.1, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.0, 0.8, 0.4, 0.1, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.2, 0.8, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.1, 0.8, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.1, 0.7, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0 },
                    { 0.0, 0.0, 0.0, 0.0, 0.0, 0.3, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }
                }
            });

            //Teacher signal
            Real[] t = { 0.0, 1.0 };

            //Run training
            Trainer.Train(nn, x, t, new MeanSquaredError(), false);

            //Keep a reference for displaying the results
            Convolution2D l2 = (Convolution2D)nn.Functions[0];


            //Print the gradients first, because running Update consumes grad
            Console.WriteLine("gw1");
            Console.WriteLine(l2.Weight.ToString("Grad"));

            Console.WriteLine("gb1");
            Console.WriteLine(l2.Bias.ToString("Grad"));

            //Update
            nn.Update();

            Console.WriteLine("w1");
            Console.WriteLine(l2.Weight);

            Console.WriteLine("b1");
            Console.WriteLine(l2.Bias);
        }
Example No. 18
        const int N = 30; //Runs with 1000 as in the reference link, but that is slow on the CPU

        public static void Run()
        {
            //Prepare the MNIST data
            Console.WriteLine("MNIST Data Loading...");
            MnistData mnistData = new MnistData();

            Console.WriteLine("Training Start...");

            //Write the network configuration into a FunctionStack
            FunctionStack nn = new FunctionStack(
                new Linear(28 * 28, N, name: "l1 Linear"), // L1
                new BatchNormalization(N, name: "l1 BatchNorm"),
                new ReLU(name: "l1 ReLU"),
                new Linear(N, N, name: "l2 Linear"), // L2
                new BatchNormalization(N, name: "l2 BatchNorm"),
                new ReLU(name: "l2 ReLU"),
                new Linear(N, N, name: "l3 Linear"), // L3
                new BatchNormalization(N, name: "l3 BatchNorm"),
                new ReLU(name: "l3 ReLU"),
                new Linear(N, N, name: "l4 Linear"), // L4
                new BatchNormalization(N, name: "l4 BatchNorm"),
                new ReLU(name: "l4 ReLU"),
                new Linear(N, N, name: "l5 Linear"), // L5
                new BatchNormalization(N, name: "l5 BatchNorm"),
                new ReLU(name: "l5 ReLU"),
                new Linear(N, N, name: "l6 Linear"), // L6
                new BatchNormalization(N, name: "l6 BatchNorm"),
                new ReLU(name: "l6 ReLU"),
                new Linear(N, N, name: "l7 Linear"), // L7
                new BatchNormalization(N, name: "l7 BatchNorm"),
                new ReLU(name: "l7 ReLU"),
                new Linear(N, N, name: "l8 Linear"), // L8
                new BatchNormalization(N, name: "l8 BatchNorm"),
                new ReLU(name: "l8 ReLU"),
                new Linear(N, N, name: "l9 Linear"), // L9
                new BatchNormalization(N, name: "l9 BatchNorm"),
                new ReLU(name: "l9 ReLU"),
                new Linear(N, N, name: "l10 Linear"), // L10
                new BatchNormalization(N, name: "l10 BatchNorm"),
                new ReLU(name: "l10 ReLU"),
                new Linear(N, N, name: "l11 Linear"), // L11
                new BatchNormalization(N, name: "l11 BatchNorm"),
                new ReLU(name: "l11 ReLU"),
                new Linear(N, N, name: "l12 Linear"), // L12
                new BatchNormalization(N, name: "l12 BatchNorm"),
                new ReLU(name: "l12 ReLU"),
                new Linear(N, N, name: "l13 Linear"), // L13
                new BatchNormalization(N, name: "l13 BatchNorm"),
                new ReLU(name: "l13 ReLU"),
                new Linear(N, N, name: "l14 Linear"), // L14
                new BatchNormalization(N, name: "l14 BatchNorm"),
                new ReLU(name: "l14 ReLU"),
                new Linear(N, 10, name: "l15 Linear") // L15
                );

            //Training does not progress with this configuration
            //FunctionStack nn = new FunctionStack(
            //    new Linear(28 * 28, N), // L1
            //    new ReLU(),
            //    new Linear(N, N), // L2
            //    new ReLU(),
            //
            //    (中略)
            //
            //    new Linear(N, N), // L14
            //    new ReLU(),
            //    new Linear(N, 10) // L15
            //);

            //Declare the optimizer
            nn.SetOptimizer(new AdaGrad());

            //Train for three epochs
            for (int epoch = 0; epoch < 3; epoch++)
            {
                Console.WriteLine("epoch " + (epoch + 1));

                //Aggregate the overall loss
                //List<Real> totalLoss = new List<Real>();
                Real totalLoss        = 0;
                long totalLossCounter = 0;

                //How many batches to run
                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    //Get random data from the training set
                    TestDataSet datasetX = mnistData.GetRandomXSet(BATCH_DATA_COUNT);

                    //Run training
                    Real sumLoss = Trainer.Train(nn, datasetX.Data, datasetX.Label, new SoftmaxCrossEntropy());
                    totalLoss += sumLoss;
                    totalLossCounter++;

                    //Test accuracy after every 20 batches
                    if (i % 20 == 0)
                    {
                        //Output results
                        Console.WriteLine("\nbatch count " + i + "/" + TRAIN_DATA_COUNT);
                        Console.WriteLine("total loss " + totalLoss / totalLossCounter);
                        Console.WriteLine("local loss " + sumLoss);
                        Console.WriteLine("");
                        Console.WriteLine("Testing...");

                        //Get random data from the test set
                        TestDataSet datasetY = mnistData.GetRandomYSet(TEST_DATA_COUNT);

                        //Run the test
                        Real accuracy = Trainer.Accuracy(nn, datasetY.Data, datasetY.Label);
                        Console.WriteLine("accuracy " + accuracy);
                    }
                }
            }
        }
Example No. 19
        private void EmitNewarr(FunctionStack stack, Type elementType)
        {
            var arrayType = GetType(new ArrayType(elementType.TypeReferenceCecil), TypeState.VTableEmitted);

            var numElements = stack.Pop();

            // Compute object size
            var typeSize = LLVM.BuildIntCast(builder, LLVM.SizeOf(elementType.DefaultTypeLLVM), nativeIntLLVM, string.Empty);

            // Compute array size (object size * num elements)
            var numElementsCasted = ConvertToNativeInt(numElements);
            var arraySize = LLVM.BuildMul(builder, typeSize, numElementsCasted, string.Empty);

            // Invoke malloc
            var allocatedData = LLVM.BuildCall(builder, allocObjectFunctionLLVM, new[] { arraySize }, string.Empty);
            var values = LLVM.BuildPointerCast(builder, allocatedData, LLVM.PointerType(elementType.DefaultTypeLLVM, 0), string.Empty);

            var numElementsAsPointer = LLVM.BuildIntToPtr(builder, numElements.Value, intPtrLLVM, string.Empty);

            // Allocate object
            var allocatedObject = AllocateObject(arrayType);

            // Prepare indices
            var indices = new[]
            {
                LLVM.ConstInt(int32LLVM, 0, false),                         // Pointer indirection
                LLVM.ConstInt(int32LLVM, (int)ObjectFields.Data, false),    // Data
                LLVM.ConstInt(int32LLVM, 1, false),                         // Access length
            };

            // Update array with size and 0 data
            var sizeLocation = LLVM.BuildInBoundsGEP(builder, allocatedObject, indices, string.Empty);
            LLVM.BuildStore(builder, numElementsAsPointer, sizeLocation);

            indices[2] = LLVM.ConstInt(int32LLVM, 2, false);                // Access data pointer
            var dataPointerLocation = LLVM.BuildInBoundsGEP(builder, allocatedObject, indices, string.Empty);
            LLVM.BuildStore(builder, values, dataPointerLocation);

            // Push on stack
            stack.Add(new StackValue(StackValueType.Object, arrayType, allocatedObject));
        }
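The layout written above (length at element index 1, data pointer at index 2) suggests how ldlen would read the length back. A sketch only, assuming a StackValueType.NativeInt stack type and a project-level intPtr type object (neither appears in these examples):

        private void EmitLdlen(FunctionStack stack)
        {
            var array = stack.Pop();

            // Prepare indices (same layout as in EmitNewarr)
            var indices = new[]
            {
                LLVM.ConstInt(int32LLVM, 0, false),                         // Pointer indirection
                LLVM.ConstInt(int32LLVM, (int)ObjectFields.Data, false),    // Data
                LLVM.ConstInt(int32LLVM, 1, false),                         // Access length
            };

            // Load the length field, which was stored as a pointer-sized value
            var sizeLocation = LLVM.BuildInBoundsGEP(builder, array.Value, indices, string.Empty);
            var length = LLVM.BuildLoad(builder, sizeLocation, string.Empty);

            // Convert back from pointer to native integer and push on stack
            var lengthValue = LLVM.BuildPtrToInt(builder, length, nativeIntLLVM, string.Empty);
            stack.Add(new StackValue(StackValueType.NativeInt, intPtr, lengthValue));
        }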
Example No. 20
        public static void Run()
        {
            Stopwatch sw = new Stopwatch();

            //Prepare the MNIST data
            Console.WriteLine("MNIST Data Loading...");
            MnistData <Real> mnistData = new MnistData <Real>();

            //Write the network configuration into a FunctionStack
            FunctionStack <Real> nn = new FunctionStack <Real>(
                new Convolution2D <Real>(1, 32, 5, pad: 2, name: "l1 Conv2D", gpuEnable: true),
                new ReLU <Real>(name: "l1 ReLU"),
                //new AveragePooling(2, 2, name: "l1 AVGPooling"),
                new MaxPooling2D <Real>(2, 2, name: "l1 MaxPooling", gpuEnable: true),
                new Convolution2D <Real>(32, 64, 5, pad: 2, name: "l2 Conv2D", gpuEnable: true),
                new ReLU <Real>(name: "l2 ReLU"),
                //new AveragePooling(2, 2, name: "l2 AVGPooling"),
                new MaxPooling2D <Real>(2, 2, name: "l2 MaxPooling", gpuEnable: true),
                new Linear <Real>(7 * 7 * 64, 1024, name: "l3 Linear", gpuEnable: true),
                new ReLU <Real>(name: "l3 ReLU"),
                new Dropout <Real>(name: "l3 DropOut"),
                new Linear <Real>(1024, 10, name: "l4 Linear", gpuEnable: true)
                );

            //Declare the optimizer
            //nn.SetOptimizer(new Adam<Real>());

            Console.WriteLine("Training Start...");

            //Training loop
            for (int epoch = 1; epoch < 3; epoch++)
            {
                Console.WriteLine("epoch " + epoch);

                //Aggregate the overall loss
                Real totalLoss      = 0;
                long totalLossCount = 0;

                //How many batches to run
                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    sw.Restart();

                    Console.WriteLine("\nbatch count " + i + "/" + TRAIN_DATA_COUNT);

                    //Get random data from the training set
                    TestDataSet <Real> datasetX = mnistData.Train.GetRandomDataSet(BATCH_DATA_COUNT);

                    //Run batch training in parallel
                    Real sumLoss = Trainer.Train(nn, datasetX, new SoftmaxCrossEntropy <Real>(), new Adam <Real>());
                    totalLoss += sumLoss;
                    totalLossCount++;

                    //Output results
                    Console.WriteLine("total loss " + totalLoss / totalLossCount);
                    Console.WriteLine("local loss " + sumLoss);

                    sw.Stop();
                    Console.WriteLine("time " + sw.Elapsed.TotalMilliseconds);

                    //Test accuracy after every 20 batches
                    if (i % 20 == 0)
                    {
                        Console.WriteLine("\nTesting...");

                        //Get random data from the test set
                        TestDataSet <Real> datasetY = mnistData.Eval.GetRandomDataSet(TEACH_DATA_COUNT);

                        //Run the test
                        Real accuracy = Trainer.Accuracy(nn, datasetY);
                        Console.WriteLine("accuracy " + accuracy);
                    }
                }
            }
        }
Example No. 21
        private void EmitLdarg(FunctionStack stack, List<StackValue> args, int operandIndex)
        {
            var arg = args[operandIndex];

            // Load value from local argument
            var value = LoadValue(arg.StackType, arg.Value, InstructionFlags.None);

            // Convert from local to stack value
            value = ConvertFromLocalToStack(arg.Type, value);

            // Add value to stack
            stack.Add(new StackValue(arg.StackType, arg.Type, value));
        }
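The store counterpart, starg, is not shown; by symmetry with EmitStloc it would presumably look like this (a sketch, not the original implementation):

        private void EmitStarg(FunctionStack stack, List<StackValue> args, int operandIndex)
        {
            var arg = args[operandIndex];

            // Convert from stack to local value
            var value = ConvertFromStackToLocal(arg.Type, stack.Pop());

            // Store value into the argument slot
            StoreValue(arg.Type.StackType, value, arg.Value, InstructionFlags.None);
        }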
Example No. 22
        public static void Run()
        {
            Console.WriteLine("Build Vocabulary.");

            Vocabulary vocabulary = new Vocabulary();

            string trainPath = InternetFileDownloader.Donwload(DOWNLOAD_URL + TRAIN_FILE, TRAIN_FILE, TRAIN_FILE_HASH);
            string validPath = InternetFileDownloader.Donwload(DOWNLOAD_URL + VALID_FILE, VALID_FILE, VALID_FILE_HASH);
            string testPath  = InternetFileDownloader.Donwload(DOWNLOAD_URL + TEST_FILE, TEST_FILE, TEST_FILE_HASH);

            int[] trainData = vocabulary.LoadData(trainPath);
            int[] validData = vocabulary.LoadData(validPath);
            int[] testData  = vocabulary.LoadData(testPath);

            int nVocab = vocabulary.Length;

            Console.WriteLine("Network Initilizing.");
            FunctionStack <Real> model = new FunctionStack <Real>(
                new EmbedID <Real>(nVocab, N_UNITS, name: "l1 EmbedID"),
                new Dropout <Real>(),
                new LSTM <Real>(N_UNITS, N_UNITS, name: "l2 LSTM"),
                new Dropout <Real>(),
                new LSTM <Real>(N_UNITS, N_UNITS, name: "l3 LSTM"),
                new Dropout <Real>(),
                new Linear <Real>(N_UNITS, nVocab, name: "l4 Linear")
                );

            for (int i = 0; i < model.Functions.Length; i++)
            {
                for (int j = 0; j < model.Functions[i].Parameters.Length; j++)
                {
                    for (int k = 0; k < model.Functions[i].Parameters[j].Data.Length; k++)
                    {
                        model.Functions[i].Parameters[j].Data[k] = ((Real)Mother.Dice.NextDouble() * 2.0f - 1.0f) / 10.0f;
                    }
                }
            }

            //Instead of clamping at the given threshold, a correction rate is derived from the L2 norm of all parameters
            GradientClipping <Real> gradientClipping = new GradientClipping <Real>(threshold: GRAD_CLIP);
            SGD <Real> sgd = new SGD <Real>(learningRate: 0.1f);

            gradientClipping.SetUp(model);
            sgd.SetUp(model);

            Real wholeLen = trainData.Length;
            int  jump     = (int)Math.Floor(wholeLen / BATCH_SIZE);
            int  epoch    = 0;

            Console.WriteLine("Train Start.");

            for (int i = 0; i < jump * N_EPOCH; i++)
            {
                NdArray <Real> x = new NdArray <Real>(new[] { 1 }, BATCH_SIZE);
                NdArray <int>  t = new NdArray <int>(new[] { 1 }, BATCH_SIZE);

                for (int j = 0; j < BATCH_SIZE; j++)
                {
                    x.Data[j] = trainData[(int)((jump * j + i) % wholeLen)];
                    t.Data[j] = trainData[(int)((jump * j + i + 1) % wholeLen)];
                }

                NdArray <Real> result  = model.Forward(x)[0];
                Real           sumLoss = new SoftmaxCrossEntropy <Real>().Evaluate(result, t);
                Console.WriteLine("[{0}/{1}] Loss: {2}", i + 1, jump, sumLoss);
                model.Backward(result);

                //Run truncated BPTT
                if ((i + 1) % BPROP_LEN == 0)
                {
                    gradientClipping.Update();
                    sgd.Update();
                    model.ResetState();
                }

                if ((i + 1) % jump == 0)
                {
                    epoch++;
                    Console.WriteLine("evaluate");
                    Console.WriteLine("validation perplexity: {0}", Evaluate(model, validData));

                    if (epoch >= 6)
                    {
                        sgd.LearningRate /= 1.2f;
                        Console.WriteLine("learning rate =" + sgd.LearningRate);
                    }
                }
            }

            Console.WriteLine("test start");
            Console.WriteLine("test perplexity:" + Evaluate(model, testData));
        }
Example No. 23
 private void EmitI8(FunctionStack stack, long operandIndex)
 {
     // Add constant integer value to stack
     stack.Add(new StackValue(StackValueType.Int64, int64,
         LLVM.ConstInt(int64LLVM, (ulong)operandIndex, true)));
 }
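The 32-bit counterpart would presumably follow the same pattern with the int32/int32LLVM pair seen in Example No. 24; a sketch under that assumption:

 private void EmitI4(FunctionStack stack, int operandIndex)
 {
     // Add constant 32-bit integer value to stack
     stack.Add(new StackValue(StackValueType.Int32, int32,
         LLVM.ConstInt(int32LLVM, (ulong)operandIndex, true)));
 }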
Example No. 24
        private void EmitComparison(FunctionStack stack, Code opcode)
        {
            var operand2 = stack.Pop();
            var operand1 = stack.Pop();

            ValueRef value1;
            ValueRef value2;
            GenerateComparableOperands(operand1, operand2, out value1, out value2);

            ValueRef compareResult;
            if (operand1.StackType == StackValueType.Float)
            {
                RealPredicate predicate;
                switch (opcode)
                {
                    case Code.Ceq:      predicate = RealPredicate.RealOEQ; break;
                    case Code.Cgt:      predicate = RealPredicate.RealOGT; break;
                    case Code.Cgt_Un:   predicate = RealPredicate.RealUGT; break;
                    case Code.Clt:      predicate = RealPredicate.RealOLT; break;
                    case Code.Clt_Un:   predicate = RealPredicate.RealULT; break;
                    default:
                        throw new NotSupportedException();
                }
                compareResult = LLVM.BuildFCmp(builder, predicate, value1, value2, string.Empty);
            }
            else
            {
                IntPredicate predicate;
                switch (opcode)
                {
                    case Code.Ceq:      predicate = IntPredicate.IntEQ; break;
                    case Code.Cgt:      predicate = IntPredicate.IntSGT; break;
                    case Code.Cgt_Un:   predicate = IntPredicate.IntUGT; break;
                    case Code.Clt:      predicate = IntPredicate.IntSLT; break;
                    case Code.Clt_Un:   predicate = IntPredicate.IntULT; break;
                    default:
                        throw new NotSupportedException();
                }
                compareResult = LLVM.BuildICmp(builder, predicate, value1, value2, string.Empty);
            }


            // Extend result to int32
            compareResult = LLVM.BuildZExt(builder, compareResult, int32LLVM, string.Empty);

            // Push result back on the stack
            stack.Add(new StackValue(StackValueType.Int32, int32, compareResult));
        }
Example No. 25
 private void EmitR8(FunctionStack stack, double operandIndex)
 {
     // Add constant double value to stack
     stack.Add(new StackValue(StackValueType.Float, @double,
         LLVM.ConstReal(@double.DataTypeLLVM, operandIndex)));
 }
Example No. 26
        private void EmitConditionalBranch(FunctionStack stack, BasicBlockRef thenBlock, BasicBlockRef elseBlock, Code opcode)
        {
            var operand2 = stack.Pop();
            var operand1 = stack.Pop();

            ValueRef value1;
            ValueRef value2;
            GenerateComparableOperands(operand1, operand2, out value1, out value2);

            ValueRef compareResult;
            if (operand1.StackType == StackValueType.Float)
            {
                RealPredicate predicate;
                switch (opcode)
                {
                    case Code.Beq:
                    case Code.Beq_S:    predicate = RealPredicate.RealOEQ; break;
                    case Code.Bge:
                    case Code.Bge_S:    predicate = RealPredicate.RealOGE; break;
                    case Code.Bgt:
                    case Code.Bgt_S:    predicate = RealPredicate.RealOGT; break;
                    case Code.Ble:
                    case Code.Ble_S:    predicate = RealPredicate.RealOLE; break;
                    case Code.Blt:
                    case Code.Blt_S:    predicate = RealPredicate.RealOLT; break;
                    case Code.Bne_Un:
                    case Code.Bne_Un_S: predicate = RealPredicate.RealUNE; break;
                    case Code.Bge_Un:
                    case Code.Bge_Un_S: predicate = RealPredicate.RealUGE; break;
                    case Code.Bgt_Un:
                    case Code.Bgt_Un_S: predicate = RealPredicate.RealUGT; break;
                    case Code.Ble_Un:
                    case Code.Ble_Un_S: predicate = RealPredicate.RealULE; break;
                    case Code.Blt_Un:
                    case Code.Blt_Un_S: predicate = RealPredicate.RealULT; break;
                    default:
                        throw new NotSupportedException();
                }
                compareResult = LLVM.BuildFCmp(builder, predicate, value1, value2, string.Empty);
            }
            else
            {
                IntPredicate predicate;
                switch (opcode)
                {
                    case Code.Beq:
                    case Code.Beq_S:    predicate = IntPredicate.IntEQ; break;
                    case Code.Bge:
                    case Code.Bge_S:    predicate = IntPredicate.IntSGE; break;
                    case Code.Bgt:
                    case Code.Bgt_S:    predicate = IntPredicate.IntSGT; break;
                    case Code.Ble:
                    case Code.Ble_S:    predicate = IntPredicate.IntSLE; break;
                    case Code.Blt:
                    case Code.Blt_S:    predicate = IntPredicate.IntSLT; break;
                    case Code.Bne_Un:
                    case Code.Bne_Un_S: predicate = IntPredicate.IntNE; break;
                    case Code.Bge_Un:
                    case Code.Bge_Un_S: predicate = IntPredicate.IntUGE; break;
                    case Code.Bgt_Un:
                    case Code.Bgt_Un_S: predicate = IntPredicate.IntUGT; break;
                    case Code.Ble_Un:
                    case Code.Ble_Un_S: predicate = IntPredicate.IntULE; break;
                    case Code.Blt_Un:
                    case Code.Blt_Un_S: predicate = IntPredicate.IntULT; break;
                    default:
                        throw new NotSupportedException();
                }
                compareResult = LLVM.BuildICmp(builder, predicate, value1, value2, string.Empty);
            }

            // Branch depending on previous test
            LLVM.BuildCondBr(builder, compareResult, thenBlock, elseBlock);
        }
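Note the asymmetry in the float predicates: the signed branch opcodes map to ordered predicates (RealOEQ, RealOGE, ...), which evaluate to false when either operand is NaN, while the _Un opcodes map to unordered predicates (RealUNE, RealUGE, ...), which evaluate to true for NaN operands. This matches the ECMA-335 semantics of the unsigned/unordered branch instructions on floating-point values.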
Ejemplo n.º 27
0
        public static void Run()
        {
            //Prepare the MNIST data
            Console.WriteLine("MNIST Data Loading...");
            MnistData <Real> mnistData = new MnistData <Real>();

            Console.WriteLine("Training Start...");

            //Write the network configuration into FunctionStacks
            FunctionStack <Real> Layer1 = new FunctionStack <Real>(
                new Linear <Real>(28 * 28, 256, name: "l1 Linear"),
                new BatchNormalization <Real>(256, name: "l1 Norm"),
                new ReLU <Real>(name: "l1 ReLU")
                );

            FunctionStack <Real> Layer2 = new FunctionStack <Real>(
                new Linear <Real>(256, 256, name: "l2 Linear"),
                new BatchNormalization <Real>(256, name: "l2 Norm"),
                new ReLU <Real>(name: "l2 ReLU")
                );

            FunctionStack <Real> Layer3 = new FunctionStack <Real>(
                new Linear <Real>(256, 256, name: "l3 Linear"),
                new BatchNormalization <Real>(256, name: "l3 Norm"),
                new ReLU <Real>(name: "l3 ReLU")
                );

            FunctionStack <Real> Layer4 = new FunctionStack <Real>(
                new Linear <Real>(256, 10, name: "l4 Linear")
                );

            //A FunctionStack can itself be stacked as a Function
            FunctionStack <Real> nn = new FunctionStack <Real>
                                      (
                Layer1,
                Layer2,
                Layer3,
                Layer4
                                      );

            FunctionStack <Real> cDNI1 = new FunctionStack <Real>(
                new Linear <Real>(256 + 10, 1024, name: "cDNI1 Linear1"),
                new BatchNormalization <Real>(1024, name: "cDNI1 Norm1"),
                new ReLU <Real>(name: "cDNI1 ReLU1"),
                new Linear <Real>(1024, 256, initialW: new Real[1024, 256], name: "cDNI1 Linear3")
                );

            FunctionStack <Real> cDNI2 = new FunctionStack <Real>(
                new Linear <Real>(256 + 10, 1024, name: "cDNI2 Linear1"),
                new BatchNormalization <Real>(1024, name: "cDNI2 Nrom1"),
                new ReLU <Real>(name: "cDNI2 ReLU1"),
                new Linear <Real>(1024, 256, initialW: new Real[1024, 256], name: "cDNI2 Linear3")
                );

            FunctionStack <Real> cDNI3 = new FunctionStack <Real>(
                new Linear <Real>(256 + 10, 1024, name: "cDNI3 Linear1"),
                new BatchNormalization <Real>(1024, name: "cDNI3 Nrom1"),
                new ReLU <Real>(name: "cDNI3 ReLU1"),
                new Linear <Real>(1024, 256, initialW: new Real[1024, 256], name: "cDNI3 Linear3")
                );

            //Declare the optimizers
            Adam <Real> L1adam = new Adam <Real>(0.00003f);
            Adam <Real> L2adam = new Adam <Real>(0.00003f);
            Adam <Real> L3adam = new Adam <Real>(0.00003f);
            Adam <Real> L4adam = new Adam <Real>(0.00003f);

            L1adam.SetUp(Layer1);
            L2adam.SetUp(Layer2);
            L3adam.SetUp(Layer3);
            L4adam.SetUp(Layer4);

            Adam <Real> cDNI1adam = new Adam <Real>(0.00003f);
            Adam <Real> cDNI2adam = new Adam <Real>(0.00003f);
            Adam <Real> cDNI3adam = new Adam <Real>(0.00003f);

            cDNI1adam.SetUp(cDNI1);
            cDNI2adam.SetUp(cDNI2);
            cDNI3adam.SetUp(cDNI3);

            for (int epoch = 0; epoch < 10; epoch++)
            {
                Console.WriteLine("epoch " + (epoch + 1));

                //Aggregate the overall loss
                Real totalLoss      = 0;
                Real cDNI1totalLoss = 0;
                Real cDNI2totalLoss = 0;
                Real cDNI3totalLoss = 0;

                long totalLossCount      = 0;
                long cDNI1totalLossCount = 0;
                long cDNI2totalLossCount = 0;
                long cDNI3totalLossCount = 0;


                //How many batches to run
                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    //Get a random batch from the training data
                    TestDataSet <Real> datasetX = mnistData.Train.GetRandomDataSet(BATCH_DATA_COUNT);

                    //Run the first layer
                    NdArray <Real> layer1ForwardResult = Layer1.Forward(datasetX.Data)[0];
                    ResultDataSet  layer1ResultDataSet = new ResultDataSet(layer1ForwardResult, datasetX.Label);

                    //Get the gradient for the first layer
                    NdArray <Real> cDNI1Result = cDNI1.Forward(layer1ResultDataSet.GetTrainData())[0];

                    //Apply the gradient to the first layer
                    layer1ForwardResult.Grad = cDNI1Result.Data.ToArray();

                    //Update the first layer
                    Layer1.Backward(layer1ForwardResult);
                    layer1ForwardResult.ParentFunc = null;
                    L1adam.Update();

                    //Run the second layer
                    NdArray <Real> layer2ForwardResult = Layer2.Forward(layer1ResultDataSet.Result)[0];
                    ResultDataSet  layer2ResultDataSet = new ResultDataSet(layer2ForwardResult, layer1ResultDataSet.Label);

                    //Get the gradient for the second layer
                    NdArray <Real> cDNI2Result = cDNI2.Forward(layer2ResultDataSet.GetTrainData())[0];

                    //Apply the gradient to the second layer
                    layer2ForwardResult.Grad = cDNI2Result.Data.ToArray();

                    //Update the second layer
                    Layer2.Backward(layer2ForwardResult);
                    layer2ForwardResult.ParentFunc = null;


                    //Train the cDNI for the first layer
                    Real cDNI1loss = new MeanSquaredError <Real>().Evaluate(cDNI1Result, new NdArray <Real>(layer1ResultDataSet.Result.Grad, cDNI1Result.Shape, cDNI1Result.BatchCount));

                    L2adam.Update();

                    cDNI1.Backward(cDNI1Result);
                    cDNI1adam.Update();

                    cDNI1totalLoss += cDNI1loss;
                    cDNI1totalLossCount++;

                    //Run the third layer
                    NdArray <Real> layer3ForwardResult = Layer3.Forward(layer2ResultDataSet.Result)[0];
                    ResultDataSet  layer3ResultDataSet = new ResultDataSet(layer3ForwardResult, layer2ResultDataSet.Label);

                    //Get the gradient for the third layer
                    NdArray <Real> cDNI3Result = cDNI3.Forward(layer3ResultDataSet.GetTrainData())[0];

                    //Apply the gradient to the third layer
                    layer3ForwardResult.Grad = cDNI3Result.Data.ToArray();

                    //Update the third layer
                    Layer3.Backward(layer3ForwardResult);
                    layer3ForwardResult.ParentFunc = null;

                    //Train the cDNI for the second layer
                    Real cDNI2loss = new MeanSquaredError <Real>().Evaluate(cDNI2Result, new NdArray <Real>(layer2ResultDataSet.Result.Grad, cDNI2Result.Shape, cDNI2Result.BatchCount));

                    L3adam.Update();

                    cDNI2.Backward(cDNI2Result);
                    cDNI2adam.Update();

                    cDNI2totalLoss += cDNI2loss;
                    cDNI2totalLossCount++;

                    //Run the fourth layer
                    NdArray <Real> layer4ForwardResult = Layer4.Forward(layer3ResultDataSet.Result)[0];

                    //Get the gradient for the fourth layer
                    Real sumLoss = new SoftmaxCrossEntropy <Real>().Evaluate(layer4ForwardResult, layer3ResultDataSet.Label);

                    //Update the fourth layer
                    Layer4.Backward(layer4ForwardResult);
                    layer4ForwardResult.ParentFunc = null;

                    totalLoss += sumLoss;
                    totalLossCount++;

                    //Train the cDNI for the third layer
                    Real cDNI3loss = new MeanSquaredError <Real>().Evaluate(cDNI3Result, new NdArray <Real>(layer3ResultDataSet.Result.Grad, cDNI3Result.Shape, cDNI3Result.BatchCount));

                    L4adam.Update();

                    cDNI3.Backward(cDNI3Result);
                    cDNI3adam.Update();

                    cDNI3totalLoss += cDNI3loss;
                    cDNI3totalLossCount++;

                    Console.WriteLine("\nbatch count " + i + "/" + TRAIN_DATA_COUNT);
                    //Output results
                    Console.WriteLine("total loss " + totalLoss / totalLossCount);
                    Console.WriteLine("local loss " + sumLoss);

                    Console.WriteLine("\ncDNI1 total loss " + cDNI1totalLoss / cDNI1totalLossCount);
                    Console.WriteLine("cDNI2 total loss " + cDNI2totalLoss / cDNI2totalLossCount);
                    Console.WriteLine("cDNI3 total loss " + cDNI3totalLoss / cDNI3totalLossCount);

                    Console.WriteLine("\ncDNI1 local loss " + cDNI1loss);
                    Console.WriteLine("cDNI2 local loss " + cDNI2loss);
                    Console.WriteLine("cDNI3 local loss " + cDNI3loss);

                    //Test accuracy after every 20 batches
                    if (i % 20 == 0)
                    {
                        Console.WriteLine("\nTesting...");

                        //Get a random batch from the test data
                        TestDataSet <Real> datasetY = mnistData.Eval.GetRandomDataSet(TEST_DATA_COUNT);

                        //Run the test
                        Real accuracy = Trainer.Accuracy(nn, datasetY.Data, datasetY.Label);
                        Console.WriteLine("accuracy " + accuracy);
                    }
                }
            }
        }
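The "c" in cDNI stands for conditioned: unlike the plain DNI variant shown later in this listing, each synthetic-gradient module here also receives the class label alongside the 256-wide activation, which is why its first Linear layer takes 256 + 10 inputs. ResultDataSet.GetTrainData is presumably where the activation and a one-hot encoding of the label get concatenated; its implementation is not included here.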
Ejemplo n.º 28
0
        private void EmitLocalloc(FunctionStack stack)
        {
            var numElements = stack.Pop();

            ValueRef numElementsCasted;
            if (numElements.StackType == StackValueType.NativeInt)
            {
                numElementsCasted = LLVM.BuildPtrToInt(builder, numElements.Value, int32LLVM, string.Empty);
            }
            else
            {
                numElementsCasted = LLVM.BuildIntCast(builder, numElements.Value, int32LLVM, string.Empty);
            }

            var alloca = LLVM.BuildArrayAlloca(builder, LLVM.Int8TypeInContext(context), numElementsCasted, string.Empty);
            alloca = LLVM.BuildPointerCast(builder, alloca, intPtr.DataTypeLLVM, string.Empty);

            stack.Add(new StackValue(StackValueType.NativeInt, intPtr, alloca));
        }
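For reference, the localloc opcode handled here is what the C# stackalloc operator compiles to; a minimal example of source code that exercises this emitter:

        static unsafe void UseStackalloc()
        {
            // Emits: ldc.i4 128; localloc (the allocation lives on the stack frame)
            byte* buffer = stackalloc byte[128];
            buffer[0] = 42;
        }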
Ejemplo n.º 29
0
 private void EmitBrtrue(FunctionStack stack, BasicBlockRef targetBasicBlock, BasicBlockRef nextBasicBlock)
 {
     // Stack element should be different from zero.
     EmitBrCommon(stack.Pop(), IntPredicate.IntNE, targetBasicBlock, nextBasicBlock);
 }
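EmitBrtrue and EmitBrfalse both defer to EmitBrCommon, which does not appear in these listings. A plausible sketch, assuming the same helpers used above: compare the popped value against zero (or null, for pointer-like values) with the supplied predicate, then emit the conditional branch.

 private void EmitBrCommon(StackValue stackValue, IntPredicate predicate, BasicBlockRef targetBasicBlock, BasicBlockRef nextBasicBlock)
 {
     ValueRef zero;
     switch (stackValue.StackType)
     {
         case StackValueType.Int32:
         case StackValueType.Int64:
             // Integer values compare against constant zero
             zero = LLVM.ConstInt(LLVM.TypeOf(stackValue.Value), 0, false);
             break;
         case StackValueType.NativeInt:
         case StackValueType.Object:
         case StackValueType.Reference:
             // Pointer-like values compare against null
             zero = LLVM.ConstPointerNull(LLVM.TypeOf(stackValue.Value));
             break;
         default:
             throw new NotSupportedException();
     }

     var compareResult = LLVM.BuildICmp(builder, predicate, stackValue.Value, zero, string.Empty);
     LLVM.BuildCondBr(builder, compareResult, targetBasicBlock, nextBasicBlock);
 }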
Ejemplo n.º 30
0
        private void EmitStsfld(FunctionStack stack, Field field, InstructionFlags instructionFlags)
        {
            var value = stack.Pop();

            var runtimeTypeInfoGlobal = GetClass(field.DeclaringType).GeneratedEETypeRuntimeLLVM;

            // Get static field GEP indices
            var indices = BuildStaticFieldIndices(field);

            // Find static field address in runtime type info
            var fieldAddress = LLVM.BuildInBoundsGEP(builder, runtimeTypeInfoGlobal, indices, string.Empty);

            // Convert stack value to appropriate type
            var fieldValue = ConvertFromStackToLocal(field.Type, value);

            // Store value in static field
            StoreValue(field.Type.StackType, fieldValue, fieldAddress, instructionFlags);
        }
Ejemplo n.º 31
0
        private void EmitLdfld(FunctionStack stack, Field field, InstructionFlags instructionFlags)
        {
            var @object = stack.Pop();

            // Compute field address
            var fieldAddress = ComputeFieldAddress(builder, field, @object.StackType, @object.Value, ref instructionFlags);

            // Load value from field and create "fake" local
            var value = LoadValue(field.Type.StackType, fieldAddress, instructionFlags);

            // Convert from local to stack value
            value = ConvertFromLocalToStack(field.Type, value);

            // Add value to stack
            stack.Add(new StackValue(field.Type.StackType, field.Type, value));
        }
Ejemplo n.º 32
0
        private void EmitLdsflda(FunctionStack stack, Field field)
        {
            var runtimeTypeInfoGlobal = GetClass(field.DeclaringType).GeneratedEETypeRuntimeLLVM;

            var refType = GetType(field.Type.TypeReferenceCecil.MakeByReferenceType(), TypeState.Opaque);

            // Get static field GEP indices
            var indices = BuildStaticFieldIndices(field);

            // Find static field address in runtime type info
            var staticFieldAddress = LLVM.BuildInBoundsGEP(builder, runtimeTypeInfoGlobal, indices, string.Empty);

            // Add value to stack
            stack.Add(new StackValue(StackValueType.Reference, refType, staticFieldAddress));
        }
Ejemplo n.º 33
0
        private void EmitStind(FunctionCompilerContext functionContext, FunctionStack stack, Code opcode)
        {
            var value = stack.Pop();
            var address = stack.Pop();

            // Determine type
            Type type;
            switch (opcode)
            {
                case Code.Stind_I: type = intPtr; break;
                case Code.Stind_I1: type = int8; break;
                case Code.Stind_I2: type = int16; break;
                case Code.Stind_I4: type = int32; break;
                case Code.Stind_I8: type = int64; break;
                case Code.Stind_R4: type = @float; break;
                case Code.Stind_R8: type = @double; break;
                case Code.Stind_Ref:
                    type = value.Type;
                    break;
                default:
                    throw new ArgumentException("Unsupported opcode", nameof(opcode));
            }

            if (CharUsesUTF8)
            {
                if (opcode == Code.Stind_I2 && address.Type.TypeReferenceCecil.FullName == typeof(char*).FullName)
                {
                    type = int8;
                }
            }

            // Convert to local type
            var sourceValue = ConvertFromStackToLocal(type, value);

            // Store value at address
            var pointerCast = LLVM.BuildPointerCast(builder, address.Value, LLVM.PointerType(LLVM.TypeOf(sourceValue), 0), string.Empty);
            StoreValue(type.StackType, sourceValue, pointerCast, functionContext.InstructionFlags);
            functionContext.InstructionFlags = InstructionFlags.None;
        }
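As a concrete anchor, an indirect store through a pointer in C# is what produces the stind family:

        static unsafe void StoreIndirect(int* p)
        {
            *p = 42; // compiles to stind.i4
        }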
Ejemplo n.º 34
0
        public static void Run()
        {
            //Prepare MNIST data
            Console.WriteLine("MNIST Data Loading...");
            MnistData mnistData = new MnistData();


            Console.WriteLine("Training Start...");

            //Write the network configuration into FunctionStacks
            FunctionStack Layer1 = new FunctionStack(
                new Linear(28 * 28, 256, name: "l1 Linear"),
                new BatchNormalization(256, name: "l1 Norm"),
                new ReLU(name: "l1 ReLU")
                );

            FunctionStack Layer2 = new FunctionStack(
                new Linear(256, 256, name: "l2 Linear"),
                new BatchNormalization(256, name: "l2 Norm"),
                new ReLU(name: "l2 ReLU")
                );

            FunctionStack Layer3 = new FunctionStack(
                new Linear(256, 256, name: "l3 Linear"),
                new BatchNormalization(256, name: "l3 Norm"),
                new ReLU(name: "l3 ReLU")
                );

            FunctionStack Layer4 = new FunctionStack(
                new Linear(256, 10, name: "l4 Linear")
                );

            //A FunctionStack can itself be stacked as a Function
            FunctionStack nn = new FunctionStack
                               (
                Layer1,
                Layer2,
                Layer3,
                Layer4
                               );

            FunctionStack DNI1 = new FunctionStack(
                new Linear(256, 1024, name: "DNI1 Linear1"),
                new BatchNormalization(1024, name: "DNI1 Norm1"),
                new ReLU(name: "DNI1 ReLU1"),
                new Linear(1024, 1024, name: "DNI1 Linear2"),
                new BatchNormalization(1024, name: "DNI1 Norm2"),
                new ReLU(name: "DNI1 ReLU2"),
                new Linear(1024, 256, initialW: new Real[1024, 256], name: "DNI1 Linear3")
                );

            FunctionStack DNI2 = new FunctionStack(
                new Linear(256, 1024, name: "DNI2 Linear1"),
                new BatchNormalization(1024, name: "DNI2 Norm1"),
                new ReLU(name: "DNI2 ReLU1"),
                new Linear(1024, 1024, name: "DNI2 Linear2"),
                new BatchNormalization(1024, name: "DNI2 Norm2"),
                new ReLU(name: "DNI2 ReLU2"),
                new Linear(1024, 256, initialW: new Real[1024, 256], name: "DNI2 Linear3")
                );

            FunctionStack DNI3 = new FunctionStack(
                new Linear(256, 1024, name: "DNI3 Linear1"),
                new BatchNormalization(1024, name: "DNI3 Norm1"),
                new ReLU(name: "DNI3 ReLU1"),
                new Linear(1024, 1024, name: "DNI3 Linear2"),
                new BatchNormalization(1024, name: "DNI3 Norm2"),
                new ReLU(name: "DNI3 ReLU2"),
                new Linear(1024, 256, initialW: new Real[1024, 256], name: "DNI3 Linear3")
                );

            //Declare the optimizers
            Layer1.SetOptimizer(new Adam());
            Layer2.SetOptimizer(new Adam());
            Layer3.SetOptimizer(new Adam());
            Layer4.SetOptimizer(new Adam());

            DNI1.SetOptimizer(new Adam());
            DNI2.SetOptimizer(new Adam());
            DNI3.SetOptimizer(new Adam());

            //Training loop
            for (int epoch = 0; epoch < 20; epoch++)
            {
                Console.WriteLine("epoch " + (epoch + 1));

                Real totalLoss     = 0;
                Real DNI1totalLoss = 0;
                Real DNI2totalLoss = 0;
                Real DNI3totalLoss = 0;

                long totalLossCount     = 0;
                long DNI1totalLossCount = 0;
                long DNI2totalLossCount = 0;
                long DNI3totalLossCount = 0;

                //How many batches to run
                for (int i = 1; i < TRAIN_DATA_COUNT + 1; i++)
                {
                    //Get data randomly from training data
                    TestDataSet datasetX = mnistData.GetRandomXSet(BATCH_DATA_COUNT);

                    //Run the first layer
                    NdArray[] layer1ForwardResult = Layer1.Forward(datasetX.Data);

                    //Get the gradient for the first layer
                    NdArray[] DNI1Result = DNI1.Forward(layer1ForwardResult);

                    //Apply the gradient to the first layer
                    layer1ForwardResult[0].Grad = DNI1Result[0].Data.ToArray();

                    //Update the first layer
                    Layer1.Backward(layer1ForwardResult);
                    layer1ForwardResult[0].ParentFunc = null; //Backward has run, so detach the computation graph
                    Layer1.Update();

                    //Run the second layer
                    NdArray[] layer2ForwardResult = Layer2.Forward(layer1ForwardResult);

                    //Get the gradient for the second layer
                    NdArray[] DNI2Result = DNI2.Forward(layer2ForwardResult);

                    //Apply the gradient to the second layer
                    layer2ForwardResult[0].Grad = DNI2Result[0].Data.ToArray();

                    //Update the second layer
                    Layer2.Backward(layer2ForwardResult);
                    layer2ForwardResult[0].ParentFunc = null;

                    //Train the DNI for the first layer
                    Real DNI1loss = new MeanSquaredError().Evaluate(DNI1Result, new NdArray(layer1ForwardResult[0].Grad, DNI1Result[0].Shape, DNI1Result[0].BatchCount));

                    Layer2.Update();

                    DNI1.Backward(DNI1Result);
                    DNI1.Update();

                    DNI1totalLoss += DNI1loss;
                    DNI1totalLossCount++;

                    //Run the third layer
                    NdArray[] layer3ForwardResult = Layer3.Forward(layer2ForwardResult);

                    //Get the gradient for the third layer
                    NdArray[] DNI3Result = DNI3.Forward(layer3ForwardResult);

                    //Apply the gradient to the third layer
                    layer3ForwardResult[0].Grad = DNI3Result[0].Data.ToArray();

                    //Update third layer
                    Layer3.Backward(layer3ForwardResult);
                    layer3ForwardResult[0].ParentFunc = null;

                    //Train the DNI for the second layer
                    Real DNI2loss = new MeanSquaredError().Evaluate(DNI2Result, new NdArray(layer2ForwardResult[0].Grad, DNI2Result[0].Shape, DNI2Result[0].BatchCount));

                    Layer3.Update();

                    DNI2.Backward(DNI2Result);
                    DNI2.Update();

                    DNI2totalLoss += DNI2loss;
                    DNI2totalLossCount++;

                    //Run the fourth layer
                    NdArray[] layer4ForwardResult = Layer4.Forward(layer3ForwardResult);

                    //Get the gradient for the fourth layer
                    Real sumLoss = new SoftmaxCrossEntropy().Evaluate(layer4ForwardResult, datasetX.Label);

                    //Update fourth layer
                    Layer4.Backward(layer4ForwardResult);
                    layer4ForwardResult[0].ParentFunc = null;

                    totalLoss += sumLoss;
                    totalLossCount++;

                    //Train the DNI for the third layer
                    Real DNI3loss = new MeanSquaredError().Evaluate(DNI3Result, new NdArray(layer3ForwardResult[0].Grad, DNI3Result[0].Shape, DNI3Result[0].BatchCount));

                    Layer4.Update();

                    DNI3.Backward(DNI3Result);
                    DNI3.Update();

                    DNI3totalLoss += DNI3loss;
                    DNI3totalLossCount++;

                    Console.WriteLine("\nbatch count " + i + "/" + TRAIN_DATA_COUNT);
                    //Output results
                    Console.WriteLine("total loss " + totalLoss / totalLossCount);
                    Console.WriteLine("local loss " + sumLoss);

                    Console.WriteLine("\nDNI1 total loss " + DNI1totalLoss / DNI1totalLossCount);
                    Console.WriteLine("DNI2 total loss " + DNI2totalLoss / DNI2totalLossCount);
                    Console.WriteLine("DNI3 total loss " + DNI3totalLoss / DNI3totalLossCount);

                    Console.WriteLine("\nDNI1 local loss " + DNI1loss);
                    Console.WriteLine("DNI2 local loss " + DNI2loss);
                    Console.WriteLine("DNI3 local loss " + DNI3loss);

                    //Test accuracy after every 20 batches
                    if (i % 20 == 0)
                    {
                        Console.WriteLine("\nTesting...");

                        //Get data randomly from test data
                        TestDataSet datasetY = mnistData.GetRandomYSet(TEST_DATA_COUNT);

                        //Run test
                        Real accuracy = Trainer.Accuracy(nn, datasetY.Data, datasetY.Label);
                        Console.WriteLine("accuracy " + accuracy);
                    }
                }
            }
        }
Ejemplo n.º 35
0
        private void EmitLdsfld(FunctionStack stack, Field field, InstructionFlags instructionFlags)
        {
            var runtimeTypeInfoGlobal = GetClass(field.DeclaringType).GeneratedEETypeRuntimeLLVM;

            // Get static field GEP indices
            var indices = BuildStaticFieldIndices(field);

            // Find static field address in runtime type info
            var staticFieldAddress = LLVM.BuildInBoundsGEP(builder, runtimeTypeInfoGlobal, indices, string.Empty);

            // Load value from field and create "fake" local
            var value = LoadValue(field.Type.StackType, staticFieldAddress, instructionFlags);

            // Convert from local to stack value
            value = ConvertFromLocalToStack(field.Type, value);

            // Add value to stack
            stack.Add(new StackValue(field.Type.StackType, field.Type, value));
        }
Ejemplo n.º 36
0
        private void EmitLdind(FunctionCompilerContext functionContext, FunctionStack stack, Code opcode)
        {
            var address = stack.Pop();

            // Determine type
            Type type;
            switch (opcode)
            {
                case Code.Ldind_I: type = intPtr; break;
                case Code.Ldind_I1: type = int8; break;
                case Code.Ldind_I2: type = int16; break;
                case Code.Ldind_I4: type = int32; break;
                case Code.Ldind_I8: type = int64; break;
                case Code.Ldind_U1: type = int8; break;
                case Code.Ldind_U2: type = int16; break;
                case Code.Ldind_U4: type = int32; break;
                case Code.Ldind_R4: type = @float; break;
                case Code.Ldind_R8: type = @double; break;
                case Code.Ldind_Ref:
                    type = GetType(((ByReferenceType)address.Type.TypeReferenceCecil).ElementType, TypeState.StackComplete);
                    break;
                default:
                    throw new ArgumentException("Unsupported opcode", nameof(opcode));
            }

            if (CharUsesUTF8)
            {
                if (opcode == Code.Ldind_I2 && address.Type.TypeReferenceCecil.FullName == typeof(char*).FullName)
                {
                    type = int8;
                }
            }

            // Load value at address
            var pointerCast = LLVM.BuildPointerCast(builder, address.Value, LLVM.PointerType(type.DefaultTypeLLVM, 0), string.Empty);

            var loadInst = LoadValue(type.StackType, pointerCast, functionContext.InstructionFlags);
            functionContext.InstructionFlags = InstructionFlags.None;

            // Convert to stack type
            var value = ConvertFromLocalToStack(type, loadInst);

            // Add to stack
            stack.Add(new StackValue(type.StackType, type, value));
        }
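And the mirror image for the ldind family:

        static unsafe int LoadIndirect(int* p)
        {
            return *p; // compiles to ldind.i4
        }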
Ejemplo n.º 37
0
        private void EmitConv(FunctionStack stack, Code opcode)
        {
            var value = stack.Pop();

            // Special case: string contains an extra indirection to access its first character.
            // We resolve it on conv.i.
            if (stringSliceable
                && value.Type.TypeReferenceCecil.FullName == typeof(string).FullName
                && (opcode == Code.Conv_I || opcode == Code.Conv_U))
            {
                // Prepare indices
                var indices = new[]
                {
                    LLVM.ConstInt(int32LLVM, 0, false),                         // Pointer indirection
                    LLVM.ConstInt(int32LLVM, (int)ObjectFields.Data, false),    // Data
                    LLVM.ConstInt(int32LLVM, 2, false),                         // Access string pointer
                };

                var charPointerLocation = LLVM.BuildInBoundsGEP(builder, value.Value, indices, string.Empty);
                var firstCharacterPointer = LLVM.BuildLoad(builder, charPointerLocation, string.Empty);

                stack.Add(new StackValue(StackValueType.NativeInt, intPtr, firstCharacterPointer));
                return;
            }

            uint intermediateWidth;
            bool isSigned;
            bool isOverflow = false;

            switch (opcode)
            {
                case Code.Conv_U: isSigned = false; intermediateWidth = (uint)intPtrSize * 8; break;
                case Code.Conv_I: isSigned = true; intermediateWidth = (uint)intPtrSize * 8; break;
                case Code.Conv_U1: isSigned = false; intermediateWidth = 8; break;
                case Code.Conv_I1: isSigned = true; intermediateWidth = 8; break;
                case Code.Conv_U2: isSigned = false; intermediateWidth = 16; break;
                case Code.Conv_I2: isSigned = true; intermediateWidth = 16; break;
                case Code.Conv_U4: isSigned = false; intermediateWidth = 32; break;
                case Code.Conv_I4: isSigned = true; intermediateWidth = 32; break;
                case Code.Conv_U8: isSigned = false; intermediateWidth = 64; break;
                case Code.Conv_I8: isSigned = true; intermediateWidth = 64; break;
                case Code.Conv_Ovf_U:  isOverflow = true; goto case Code.Conv_U;
                case Code.Conv_Ovf_I:  isOverflow = true; goto case Code.Conv_I;
                case Code.Conv_Ovf_U1: isOverflow = true; goto case Code.Conv_U1;
                case Code.Conv_Ovf_I1: isOverflow = true; goto case Code.Conv_I1;
                case Code.Conv_Ovf_U2: isOverflow = true; goto case Code.Conv_U2;
                case Code.Conv_Ovf_I2: isOverflow = true; goto case Code.Conv_I2;
                case Code.Conv_Ovf_U4: isOverflow = true; goto case Code.Conv_U4;
                case Code.Conv_Ovf_I4: isOverflow = true; goto case Code.Conv_I4;
                case Code.Conv_Ovf_U8: isOverflow = true; goto case Code.Conv_U8;
                case Code.Conv_Ovf_I8: isOverflow = true; goto case Code.Conv_I8;
                case Code.Conv_Ovf_U_Un:  isOverflow = true; goto case Code.Conv_U;
                case Code.Conv_Ovf_I_Un:  isOverflow = true; goto case Code.Conv_I;
                case Code.Conv_Ovf_U1_Un: isOverflow = true; goto case Code.Conv_U1;
                case Code.Conv_Ovf_I1_Un: isOverflow = true; goto case Code.Conv_I1;
                case Code.Conv_Ovf_U2_Un: isOverflow = true; goto case Code.Conv_U2;
                case Code.Conv_Ovf_I2_Un: isOverflow = true; goto case Code.Conv_I2;
                case Code.Conv_Ovf_U4_Un: isOverflow = true; goto case Code.Conv_U4;
                case Code.Conv_Ovf_I4_Un: isOverflow = true; goto case Code.Conv_I4;
                case Code.Conv_Ovf_U8_Un: isOverflow = true; goto case Code.Conv_U8;
                case Code.Conv_Ovf_I8_Un: isOverflow = true; goto case Code.Conv_I8;
                case Code.Conv_R4:
                case Code.Conv_R8:
                    var inputTypeFullName = value.Type.TypeReferenceCecil.FullName;
                    isSigned = inputTypeFullName == typeof(int).FullName
                        || inputTypeFullName == typeof(short).FullName
                        || inputTypeFullName == typeof(sbyte).FullName
                        || inputTypeFullName == typeof(IntPtr).FullName;
                    intermediateWidth = 0; // unknown yet, depends on input
                    break;
                case Code.Conv_R_Un:
                    // TODO: Not sure if this is exactly what Conv_R_Un should do...
                    isSigned = false;
                    intermediateWidth = 0;
                    break;
                default:
                    throw new InvalidOperationException();
            }


            var currentValue = value.Value;

            if (value.StackType == StackValueType.NativeInt)
            {
                // Convert to integer
                currentValue = LLVM.BuildPtrToInt(builder, currentValue, nativeIntLLVM, string.Empty);
            }
            else if (value.StackType == StackValueType.Reference
                || value.StackType == StackValueType.Object)
            {
                if (opcode != Code.Conv_U8 && opcode != Code.Conv_U
                    && opcode != Code.Conv_I8 && opcode != Code.Conv_I)
                    throw new InvalidOperationException();

                // Convert to integer
                currentValue = LLVM.BuildPtrToInt(builder, currentValue, nativeIntLLVM, string.Empty);
            }
            else if (value.StackType == StackValueType.Float)
            {
                if (opcode == Code.Conv_R4 || opcode == Code.Conv_R8)
                {
                    // Special case: float to float, avoid usual case that goes through an intermediary integer.
                    var outputType = opcode == Code.Conv_R8 ? @double : @float;
                    currentValue = LLVM.BuildFPCast(builder, currentValue, outputType.DataTypeLLVM, string.Empty);
                    stack.Add(new StackValue(StackValueType.Float, outputType, currentValue));
                    return;
                }

                // TODO: Float conversions
                currentValue = isSigned
                    ? LLVM.BuildFPToSI(builder, currentValue, LLVM.IntTypeInContext(context, intermediateWidth), string.Empty)
                    : LLVM.BuildFPToUI(builder, currentValue, LLVM.IntTypeInContext(context, intermediateWidth), string.Empty);
            }

            var inputType = LLVM.TypeOf(currentValue);
            var inputWidth = LLVM.GetIntTypeWidth(inputType);

            // Auto-adapt intermediate width for floats
            if (opcode == Code.Conv_R4 || opcode == Code.Conv_R8 || opcode == Code.Conv_R_Un)
            {
                intermediateWidth = inputWidth;
            }

            var smallestWidth = Math.Min(intermediateWidth, inputWidth);
            var smallestType = LLVM.IntTypeInContext(context, smallestWidth);
            var outputWidth = Math.Max(intermediateWidth, 32);

            // Truncate (if necessary)
            if (smallestWidth < inputWidth)
                currentValue = LLVM.BuildTrunc(builder, currentValue, smallestType, string.Empty);

            if (isOverflow)
            {
                // TODO: Compare currentValue with pre-trunc value?
            }

            // Reextend to appropriate type (if necessary)
            if (outputWidth > smallestWidth)
            {
                var outputIntType = LLVM.IntTypeInContext(context, outputWidth);
                if (isSigned)
                    currentValue = LLVM.BuildSExt(builder, currentValue, outputIntType, string.Empty);
                else
                    currentValue = LLVM.BuildZExt(builder, currentValue, outputIntType, string.Empty);
            }

            // Add constant integer value to stack
            switch (opcode)
            {
                case Code.Conv_U:
                case Code.Conv_I:
                case Code.Conv_Ovf_U:
                case Code.Conv_Ovf_I:
                case Code.Conv_Ovf_U_Un:
                case Code.Conv_Ovf_I_Un:
                    // Convert to native int (if necessary)
                    currentValue = LLVM.BuildIntToPtr(builder, currentValue, intPtrLLVM, string.Empty);
                    stack.Add(new StackValue(StackValueType.NativeInt, intPtr, currentValue));
                    break;
                case Code.Conv_U1:
                case Code.Conv_I1:
                case Code.Conv_U2:
                case Code.Conv_I2:
                case Code.Conv_U4:
                case Code.Conv_I4:
                case Code.Conv_Ovf_U1:
                case Code.Conv_Ovf_I1:
                case Code.Conv_Ovf_U2:
                case Code.Conv_Ovf_I2:
                case Code.Conv_Ovf_U4:
                case Code.Conv_Ovf_I4:
                case Code.Conv_Ovf_U1_Un:
                case Code.Conv_Ovf_I1_Un:
                case Code.Conv_Ovf_U2_Un:
                case Code.Conv_Ovf_I2_Un:
                case Code.Conv_Ovf_U4_Un:
                case Code.Conv_Ovf_I4_Un:
                    stack.Add(new StackValue(StackValueType.Int32, int32, currentValue));
                    break;
                case Code.Conv_U8:
                case Code.Conv_I8:
                case Code.Conv_Ovf_U8:
                case Code.Conv_Ovf_I8:
                case Code.Conv_Ovf_U8_Un:
                case Code.Conv_Ovf_I8_Un:
                    stack.Add(new StackValue(StackValueType.Int64, int64, currentValue));
                    break;
                case Code.Conv_R4:
                case Code.Conv_R8:
                case Code.Conv_R_Un:
                    var outputType = opcode == Code.Conv_R8 || opcode == Code.Conv_R_Un ? @double : @float;
                    if (isSigned)
                        currentValue = LLVM.BuildSIToFP(builder, currentValue, outputType.DataTypeLLVM, string.Empty);
                    else
                        currentValue = LLVM.BuildUIToFP(builder, currentValue, outputType.DataTypeLLVM, string.Empty);
                    stack.Add(new StackValue(StackValueType.Float, outputType, currentValue));
                    break;
                default:
                    throw new InvalidOperationException();
            }
        }
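For orientation, the C# casts that drive this emitter (note that the _Ovf variants are accepted but, per the TODO above, the overflow check itself is not yet emitted):

        static byte Narrow(int x) => (byte)x;                 // conv.u1
        static byte NarrowChecked(int x) => checked((byte)x); // conv.ovf.u1
        static double Widen(int x) => x;                      // conv.r8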
Ejemplo n.º 38
0
 private void EmitLdftn(FunctionStack stack, Function targetMethod)
 {
     stack.Add(new StackValue(StackValueType.NativeInt, intPtr, LLVM.BuildPointerCast(builder, targetMethod.GeneratedValue, intPtrLLVM, string.Empty)));
 }
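In C#, ldftn shows up when a delegate is constructed from a method group; the raw function pointer is pushed and handed to the delegate constructor:

        static void Target() { }
        static Action MakeDelegate() => new Action(Target); // ldnull; ldftn Target; newobj Action::.ctor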
Ejemplo n.º 39
0
        private void EmitLdlen(FunctionStack stack)
        {
            var array = stack.Pop();

            // Prepare indices
            var indices = new[]
            {
                LLVM.ConstInt(int32LLVM, 0, false),                         // Pointer indirection
                LLVM.ConstInt(int32LLVM, (int) ObjectFields.Data, false),   // Data
                LLVM.ConstInt(int32LLVM, 1, false),                         // Access length
            };

            // Force array type to be emitted
            GetType(array.Type.TypeReferenceCecil, TypeState.VTableEmitted);

            // Load data pointer
            var arraySizeLocation = LLVM.BuildInBoundsGEP(builder, array.Value, indices, string.Empty);
            var arraySize = LLVM.BuildLoad(builder, arraySizeLocation, string.Empty);

            // Add constant integer value to stack
            stack.Add(new StackValue(StackValueType.NativeInt, intPtr, arraySize));
        }
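Per ECMA-335 the result of ldlen is a native unsigned integer, which is why the loaded length is pushed as NativeInt here; C# code then typically narrows it back to int:

        static int Length(int[] array) => array.Length; // ldlen followed by conv.i4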
Ejemplo n.º 40
0
        private void EmitLdstr(FunctionStack stack, string operand)
        {
            var stringClass = GetClass(corlib.MainModule.GetType(typeof(string).FullName));

            var utf16String = operand.Select(x => LLVM.ConstInt(LLVM.Int16TypeInContext(context), x, false)); // string
            utf16String = utf16String.Concat(new[] { LLVM.ConstNull(LLVM.Int16TypeInContext(context)) }); // null-terminate

            var stringConstantData = LLVM.ConstArray(LLVM.Int16TypeInContext(context), utf16String.ToArray());

            var stringConstant = LLVM.ConstStructInContext(context, new[]
            {
                stringClass.GeneratedEETypeRuntimeLLVM,
                LLVM.ConstInt(int32LLVM, (ulong)operand.Length, false),
                stringConstantData
            }, false);

            var stringConstantGlobal = LLVM.AddGlobal(module, LLVM.TypeOf(stringConstant), ".string");
            LLVM.SetInitializer(stringConstantGlobal, stringConstant);
            LLVM.SetLinkage(stringConstantGlobal, Linkage.PrivateLinkage);

            // Push on stack
            stack.Add(new StackValue(StackValueType.Object, stringClass.Type, LLVM.ConstPointerCast(stringConstantGlobal, stringClass.Type.DefaultTypeLLVM)));
        }
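Note the constant's layout mirrors the runtime string object: an EEType pointer, a length field (operand.Length, which excludes the null terminator appended to the character data), and the inline UTF-16 characters. Since a fresh private global appears to be emitted per ldstr site rather than interned, identical literals from different sites may not be reference-equal the way CLR string interning would make them.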
Ejemplo n.º 41
0
        private void EmitStelem(FunctionStack stack)
        {
            var value = stack.Pop();
            var index = stack.Pop();
            var array = stack.Pop();

            // Force array type to be emitted
            GetType(array.Type.TypeReferenceCecil, TypeState.VTableEmitted);

            var indexValue = ConvertToNativeInt(index);

            // Get element type
            var elementType = GetType(((ArrayType)array.Type.TypeReferenceCecil).ElementType, TypeState.StackComplete);

            // Load array data pointer
            var arrayFirstElement = LoadArrayDataPointer(array);

            // Find pointer of element at requested index
            var arrayElementPointer = LLVM.BuildGEP(builder, arrayFirstElement, new[] { indexValue }, string.Empty);

            // Convert
            var convertedElement = ConvertFromStackToLocal(elementType, value);

            // Store element
            StoreValue(elementType.StackType, convertedElement, arrayElementPointer, InstructionFlags.None);
        }
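The corresponding C# is a plain array element store; note this listing emits no range check, which a fully conforming stelem would also imply:

        static void StoreElement(int[] array, int index, int value)
        {
            array[index] = value; // compiles to stelem.i4
        }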
Ejemplo n.º 42
0
 private void EmitI4(FunctionStack stack, int operand)
 {
     // Add constant integer value to stack
     stack.Add(new StackValue(StackValueType.Int32, int32,
         LLVM.ConstInt(int32LLVM, (uint)operand, true)));
 }
Ejemplo n.º 43
0
        private void EmitIsOrCastclass(FunctionCompilerContext functionContext, FunctionStack stack, Class @class, Code opcode, int instructionOffset)
        {
            var functionGlobal = functionContext.FunctionGlobal;

            var obj = stack.Pop();

            // Force emission of class to be sure we have RTTI type generated
            // Another option would be to cast everything to object before querying RTTI object
            var objClass = GetClass(obj.Type);

            var currentBlock = LLVM.GetInsertBlock(builder);

            // Prepare basic blocks (for PHI instruction)
            var typeIsNotNullBlock = LLVM.AppendBasicBlockInContext(context, functionGlobal, string.Format("L_{0:x4}_type_not_null", instructionOffset));
            var typeNotMatchBlock = LLVM.AppendBasicBlockInContext(context, functionGlobal, string.Format("L_{0:x4}_type_not_match", instructionOffset));
            var typeCheckDoneBlock = LLVM.AppendBasicBlockInContext(context, functionGlobal, string.Format("L_{0:x4}_type_check_done", instructionOffset));

            // Properly order block for easy LLVM bitcode reading
            LLVM.MoveBasicBlockAfter(typeIsNotNullBlock, currentBlock);
            LLVM.MoveBasicBlockAfter(typeNotMatchBlock, typeIsNotNullBlock);
            LLVM.MoveBasicBlockAfter(typeCheckDoneBlock, typeNotMatchBlock);

            var isObjNonNull = LLVM.BuildICmp(builder, IntPredicate.IntNE, obj.Value, LLVM.ConstPointerNull(LLVM.TypeOf(obj.Value)), string.Empty);
            LLVM.BuildCondBr(builder, isObjNonNull, typeIsNotNullBlock, opcode == Code.Castclass ? typeCheckDoneBlock : typeNotMatchBlock);

            LLVM.PositionBuilderAtEnd(builder, typeIsNotNullBlock);

            // Get RTTI pointer
            var indices = new[]
            {
                LLVM.ConstInt(int32LLVM, 0, false), // Pointer indirection
                LLVM.ConstInt(int32LLVM, (int)ObjectFields.RuntimeTypeInfo, false), // Access RTTI
            };

            var rttiPointer = LLVM.BuildInBoundsGEP(builder, obj.Value, indices, string.Empty);
            rttiPointer = LLVM.BuildLoad(builder, rttiPointer, string.Empty);

            // castedPointerObject is valid only from typeCheckBlock
            var castedPointerType = LLVM.PointerType(@class.Type.ObjectTypeLLVM, 0);
            ValueRef castedPointerObject;

            BasicBlockRef typeCheckBlock;

            if (@class.Type.TypeReferenceCecil.Resolve().IsInterface)
            {
                // Cast as appropriate pointer type (for next PHI incoming if success)
                castedPointerObject = LLVM.BuildPointerCast(builder, obj.Value, castedPointerType, string.Empty);

                var inlineRuntimeTypeInfoType = LLVM.TypeOf(LLVM.GetParam(isInstInterfaceFunctionLLVM, 0));
                var isInstInterfaceResult = LLVM.BuildCall(builder, isInstInterfaceFunctionLLVM, new[]
                {
                    LLVM.BuildPointerCast(builder, rttiPointer, inlineRuntimeTypeInfoType, string.Empty),
                    LLVM.BuildPointerCast(builder, @class.GeneratedEETypeTokenLLVM, inlineRuntimeTypeInfoType, string.Empty),
                }, string.Empty);

                LLVM.BuildCondBr(builder, isInstInterfaceResult, typeCheckDoneBlock, typeNotMatchBlock);

                typeCheckBlock = LLVM.GetInsertBlock(builder);
            }
            else
            {
                // TODO: Probably better to rewrite this in C, but need to make sure depth will be inlined as constant
                // Get super type count
                // Get method stored in IMT slot
                indices = new[]
                {
                    LLVM.ConstInt(int32LLVM, 0, false), // Pointer indirection
                    LLVM.ConstInt(int32LLVM, (int)RuntimeTypeInfoFields.SuperTypeCount, false), // Super type count
                };

                typeCheckBlock = LLVM.AppendBasicBlockInContext(context, functionGlobal, string.Format("L_{0:x4}_type_check", instructionOffset));
                LLVM.MoveBasicBlockBefore(typeCheckBlock, typeNotMatchBlock);

                var superTypeCount = LLVM.BuildInBoundsGEP(builder, rttiPointer, indices, string.Empty);
                superTypeCount = LLVM.BuildLoad(builder, superTypeCount, string.Empty);

                var depthCompareResult = LLVM.BuildICmp(builder, IntPredicate.IntSGE, superTypeCount, LLVM.ConstInt(int32LLVM, (ulong)@class.Depth, false), string.Empty);
                LLVM.BuildCondBr(builder, depthCompareResult, typeCheckBlock, typeNotMatchBlock);

                // Start new typeCheckBlock
                LLVM.PositionBuilderAtEnd(builder, typeCheckBlock);

                // Get super types
                indices = new[]
                {
                    LLVM.ConstInt(int32LLVM, 0, false), // Pointer indirection
                    LLVM.ConstInt(int32LLVM, (int)RuntimeTypeInfoFields.SuperTypes, false), // Super types
                };

                var superTypes = LLVM.BuildInBoundsGEP(builder, rttiPointer, indices, string.Empty);
                superTypes = LLVM.BuildLoad(builder, superTypes, string.Empty);

                // Get actual super type
                indices = new[]
                {
                    LLVM.ConstInt(int32LLVM, (ulong)@class.Depth, false), // Pointer indirection
                };
                var superType = LLVM.BuildGEP(builder, superTypes, indices, string.Empty);
                superType = LLVM.BuildLoad(builder, superType, string.Empty);

                // Cast as appropriate pointer type (for next PHI incoming if success)
                castedPointerObject = LLVM.BuildPointerCast(builder, obj.Value, castedPointerType, string.Empty);

                // Compare super type in array at given depth with expected one
                var typeCompareResult = LLVM.BuildICmp(builder, IntPredicate.IntEQ, superType, LLVM.ConstPointerCast(@class.GeneratedEETypeRuntimeLLVM, intPtrLLVM), string.Empty);
                LLVM.BuildCondBr(builder, typeCompareResult, typeCheckDoneBlock, typeNotMatchBlock);
            }

            // Start new typeNotMatchBlock: set object to null and jump to typeCheckDoneBlock
            LLVM.PositionBuilderAtEnd(builder, typeNotMatchBlock);
            if (opcode == Code.Castclass)
            {
                // Create InvalidCastException object
                var invalidCastExceptionClass = GetClass(corlib.MainModule.GetType(typeof(InvalidCastException).FullName));
                EmitNewobj(functionContext, invalidCastExceptionClass.Type, invalidCastExceptionClass.Functions.Single(x => x.MethodReference.Name == ".ctor" && x.MethodReference.Parameters.Count == 0));
                var invalidCastException = stack.Pop();
                GenerateInvoke(functionContext, throwExceptionFunctionLLVM, new[] {LLVM.BuildPointerCast(builder, invalidCastException.Value, LLVM.TypeOf(LLVM.GetParam(throwExceptionFunctionLLVM, 0)), string.Empty)});
                LLVM.BuildUnreachable(builder);
            }
            else
            {
                LLVM.BuildBr(builder, typeCheckDoneBlock);
            }

            // Start new typeCheckDoneBlock
            LLVM.PositionBuilderAtEnd(builder, typeCheckDoneBlock);
            functionContext.BasicBlock = typeCheckDoneBlock;

            // Put back with appropriate type at end of stack
            ValueRef mergedVariable;
            if (opcode == Code.Castclass)
            {
                mergedVariable = LLVM.BuildPhi(builder, castedPointerType, string.Empty);
                LLVM.AddIncoming(mergedVariable,
                    new[] { castedPointerObject, LLVM.ConstPointerNull(castedPointerType) },
                    new[] { typeCheckBlock, currentBlock });
            }
            else
            {
                mergedVariable = LLVM.BuildPhi(builder, castedPointerType, string.Empty);
                LLVM.AddIncoming(mergedVariable,
                    new[] {castedPointerObject, LLVM.ConstPointerNull(castedPointerType)},
                    new[] {typeCheckBlock, typeNotMatchBlock});
            }
            stack.Add(new StackValue(obj.StackType, @class.Type, mergedVariable));
        }
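The two opcodes share this path because they differ only in how failure is handled, as the typeNotMatchBlock above shows: isinst produces null, while castclass throws InvalidCastException. In C# terms:

        class Foo { }
        static Foo TryCast(object obj) => obj as Foo; // isinst: null on failure
        static Foo Cast(object obj) => (Foo)obj;      // castclass: throws on failure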
Ejemplo n.º 44
0
        public static void CreateFunctionTree(string[] postfixArray, FunctionTree FTree)
        {
            FunctionStack fStk = new FunctionStack();

            FTree.varList = new List <FVar>();
            string f; int i = 0;

            Function temp1, temp2;
            bool     chk1;

            while (i < postfixArray.Length)
            {
                f = postfixArray[i];
                //Console.Write(" "+ f);

                if (f.Equals(FNeg.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FNeg.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FNeg(fStk.TopAndPop()));
                }
                else if (f.Equals(FAdd.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FAdd.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FAdd(fStk.TopAndPop(), fStk.TopAndPop(), 1));
                }
                else if (f.Equals(FSub.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FSub.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FSub(fStk.TopAndPop(), fStk.TopAndPop(), 1));
                }
                else if (f.Equals(FMul.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FMul.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    temp1 = fStk.TopAndPop();
                    chk1  = temp1.GetID().Equals(FCons.ID) && !fStk.Top().GetID().Equals(FCons.ID);

                    if (chk1 && fStk.Top().isTwoSidedFunction && !((TwoSidedFunction)fStk.Top()).RHS().GetID().Equals(FCons.ID))
                    {
                        temp2 = ((TwoSidedFunction)fStk.Top()).RHS();
                        ((TwoSidedFunction)fStk.Top()).SetRHS(temp1);
                        fStk.Push(new FMul(temp2, fStk.TopAndPop(), 1));
                    }
                    else if (chk1 && fStk.Top().isVariable)
                    {
                        fStk.Push(new FMul(fStk.TopAndPop(), temp1, 1));
                    }
                    else
                    {
                        fStk.Push(new FMul(temp1, fStk.TopAndPop(), 1));
                    }
                }
                else if (f.Equals(FDiv.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FDiv.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FDiv(fStk.TopAndPop(), fStk.TopAndPop(), 1));
                }
                else if (f.Equals(FPow.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FPow.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FPow(fStk.TopAndPop(), fStk.TopAndPop(), 1));
                }
                else if (f.Equals(FExp.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FExp.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FExp(fStk.TopAndPop()));
                }
                else if (f.Equals(FLn.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FLn.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FLn(fStk.TopAndPop()));
                }
                // sin()
                else if (f.Equals(FSin.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FSin.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FSin(fStk.TopAndPop()));
                }
                else if (f.Equals(FCos.ID, StringComparison.CurrentCultureIgnoreCase) || f.Equals(FCos.Symbol, StringComparison.CurrentCultureIgnoreCase))
                {
                    fStk.Push(new FCos(fStk.TopAndPop()));
                }

                // add rest of the functions

                else if (IsConstant(f))                 // double number
                {
                    fStk.Push(new FCons(Convert.ToDouble(f)));
                }
                else
                {
                    FVar @var = FTree.FindVariable(f);
                    if (@var != null)
                    {
                        fStk.Push(@var);
                    }
                    else
                    {
                        FunctionTree temp;
                        FVar         tempVar;
                        if ((tempVar = ListHandler.ML_VariableList(MLGUI.GetVariable, f)) != null && tempVar.IsSet())
                        {
                            fStk.Push(new FCons(tempVar.GetValue()));
                        }
                        else if ((temp = ListHandler.ML_FunctionList(MLGUI.GetFunction, f)) != null)
                        {
                            fStk.Push(temp.rootNode);
                        }
                        else
                        {
                            fStk.Push(FTree.AddVariable(ListHandler.ML_VariableList(MLGUI.AddItem, f)));                          // variable
                        }
                    }
                }

                i++;
            }

            FTree.rootNode = fStk.TopAndPop();
        }
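A usage sketch, assuming FunctionTree exposes a parameterless constructor and that FAdd.Symbol and FMul.Symbol are "+" and "*" (the input is postfix notation):

            FunctionTree tree = new FunctionTree();
            CreateFunctionTree(new[] { "3", "4", "+", "x", "*" }, tree); // (3 + 4) * x
            // tree.rootNode now holds an FMul combining the variable x with FAdd(4, 3);
            // unknown tokens like "x" are resolved (or created) through the ListHandler/MLGUI lists.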