Example #1
        public void TestAddCUDA()
        {
            //GEMM dimensions: C (m x n) = alpha * A (m x k) * B (k x n) + beta * C
            int    count_rows_a = 2;   //m
            int    count_shared = 2;   //k
            int    count_cols_b = 2;   //n
            double alpha        = 1.0;
            double beta         = 1.0;

            //Alloc device memory and copy the host arrays to the device (implicit conversion in ManagedCuda)
            CudaDeviceVariable<double> A = new double[] { 0, 0, 0, 0 };
            CudaDeviceVariable<double> B = new double[] { 0, 0, 0, 0 };
            CudaDeviceVariable<double> C = new double[] { 0, 0, 0, 0 };

            //Create the cuBLAS handle
            CudaBlas blas = new CudaBlas();

            //C = alpha * A * B + beta * C (column-major, leading dimensions = row counts)
            blas.Gemm(Operation.NonTranspose, Operation.NonTranspose,
                      count_rows_a, count_cols_b, count_shared,
                      alpha,
                      A, count_rows_a,
                      B, count_shared,
                      beta,
                      C, count_rows_a);


            //Copy data back to host
            double[] result = C;

            ToolsArray.print(result);
            Assert.AreEqual(0, result[0], 0.001);

            //Clean up: release the cuBLAS handle and the device buffers
            blas.Dispose();
            A.Dispose();
            B.Dispose();
            C.Dispose();
        }
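A quick way to sanity-check the GEMM call is to recompute the product on the host. The sketch below is not part of the original example; the helper name GemmReference is an assumption, it follows the column-major layout cuBLAS uses, and its output can be compared element-wise against result instead of the single Assert above.

        //Host-side reference for C = alpha * A * B + beta * C (column-major storage, as in cuBLAS)
        private static double[] GemmReference(int m, int n, int k,
                                              double alpha, double[] a, double[] b,
                                              double beta, double[] c)
        {
            double[] result = new double[m * n];
            for (int col = 0; col < n; col++)
            {
                for (int row = 0; row < m; row++)
                {
                    double sum = 0.0;
                    for (int i = 0; i < k; i++)
                    {
                        sum += a[i * m + row] * b[col * k + i];   //A[row, i] * B[i, col]
                    }
                    result[col * m + row] = alpha * sum + beta * c[col * m + row];
                }
            }
            return result;
        }

For the all-zero inputs in TestAddCUDA the reference result is all zeros, which matches the Assert on result[0].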
Example #2
        public void LearnXORTest()
        {
            //XOR truth table: four input pairs and their expected outputs
            float[][] inputs  = new float[4][];
            float[][] targets = new float[4][];
            inputs[0]  = new float[] { 0, 0 };
            inputs[1]  = new float[] { 0, 1 };
            inputs[2]  = new float[] { 1, 0 };
            inputs[3]  = new float[] { 1, 1 };
            targets[0] = new float[] { 0 };
            targets[1] = new float[] { 1 };
            targets[2] = new float[] { 1 };
            targets[3] = new float[] { 0 };
            //Network topology: 2 inputs, 2 hidden neurons, 1 output
            MultiLayerPerceptron mlp = new MultiLayerPerceptron(new int[] { 2, 2, 1 });

            mlp.set_learning_rate(0.2f);
            mlp.set_eligibility(0.1f);

            //Train on the full batch for 10000 epochs
            for (int i = 0; i < 10000; i++)
            {
                mlp.train_batch(inputs, targets);
            }
            //After training, the outputs should approach 0, 1, 1, 0
            ToolsArray.print(mlp.estimate(inputs[0]));
            ToolsArray.print(mlp.estimate(inputs[1]));
            ToolsArray.print(mlp.estimate(inputs[2]));
            ToolsArray.print(mlp.estimate(inputs[3]));
        }
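LearnXORTest only prints the outputs. A hedged helper like the one below (the name AssertLearnedXOR and the assumption that estimate returns a single-element float array are mine; the Assert API is the one used in Example #1) could be called at the end of the test, e.g. AssertLearnedXOR(mlp, inputs, 0.1f), to assert convergence with a loose tolerance, since training is stochastic and exact values vary.

        //Hypothetical helper (not in the original test): asserts the trained network reproduces XOR within a tolerance
        private static void AssertLearnedXOR(MultiLayerPerceptron mlp, float[][] inputs, float tolerance)
        {
            float[] expected = { 0f, 1f, 1f, 0f };
            for (int i = 0; i < expected.Length; i++)
            {
                //Assumption: estimate returns a single-element array for the 1-neuron output layer
                float output = mlp.estimate(inputs[i])[0];
                Assert.AreEqual(expected[i], output, tolerance);
            }
        }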
Example #3
 public BigInteger[,] KeyArray()
 {
     //Return a defensive copy so callers cannot mutate the internal public key matrix
     return ToolsArray.Copy(d_public_key);
 }
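ToolsArray.Copy itself is not shown in these examples. A minimal sketch of what such a deep-copy helper for a BigInteger[,] could look like follows; the signature is an assumption inferred from the call above, the real ToolsArray.Copy may differ, and it needs using System.Numerics.

 //Hypothetical sketch of a 2D copy helper like the one called above
 public static BigInteger[,] Copy(BigInteger[,] source)
 {
     int rows = source.GetLength(0);
     int cols = source.GetLength(1);
     BigInteger[,] copy = new BigInteger[rows, cols];
     for (int r = 0; r < rows; r++)
     {
         for (int c = 0; c < cols; c++)
         {
             copy[r, c] = source[r, c];   //BigInteger is immutable, so element assignment is a safe copy
         }
     }
     return copy;
 }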