Example #1
        // Computes this = alpha * A * B + beta * this using MKL's single-precision GEMM.
        // A is m-by-k, B is k-by-n and the destination tensor (this) is m-by-n,
        // all stored in column-major order.
        public void GEMM(Tensor A, Tensor B, float alpha, float beta)
        {
            int m = A.Dimensions[0];
            int n = B.Dimensions[1];
            int k = A.Dimensions[1];
            int lda = m, ldb = k, ldc = m;

            MKLCBLAS.cblas_sgemm(
                MKLCBLAS.Order.ColMajor,
                MKLCBLAS.Transpose.None,
                MKLCBLAS.Transpose.None,
                m, n, k,
                alpha, A.memPtr, lda,
                B.memPtr, ldb,
                beta, this.memPtr, ldc);
        }
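
A minimal usage sketch follows (hypothetical values; it assumes Tensor can be constructed with two-dimensional dims and that memPtr holds the elements in column-major order, as the Order.ColMajor argument above implies):

        // Hypothetical sketch: compute C = 1.0f * A * B + 0.0f * C for a 2x3 by 3x2 product.
        Tensor A = new Tensor(new int[] { 2, 3 });
        Tensor B = new Tensor(new int[] { 3, 2 });
        Tensor C = new Tensor(new int[] { 2, 2 });
        for (int i = 0; i < A.TotalLength; i++) A.memPtr[i] = i + 1;   // sample data
        for (int i = 0; i < B.TotalLength; i++) B.memPtr[i] = 1.0f;
        C.GEMM(A, B, 1.0f, 0.0f);   // with beta = 0, the previous contents of C are ignored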
Example #2
        // Loads a CeNiN network from the binary model file at 'path',
        // building the layer chain (input, conv, relu, pool, softmax, output) as it reads.
        public CNN(string path)
        {
            bool useMKLBLAS = MKLCBLAS.isAvailable();

            //if (useMKLBLAS)
            //    MKLCBLAS.setNumThreads(Environment.ProcessorCount);

            FileStream   f  = null;
            BinaryReader br = null;

            try
            {
                f  = new FileStream(path, FileMode.Open);
                br = new BinaryReader(f, Encoding.ASCII, false);
                // The model file must start with the 25-character CeNiN header string.
                char[] c = br.ReadChars(25);
                if (!(new string(c)).Equals(CeNiN_FILE_HEADER))
                {
                    throw new Exception("Invalid file header!");
                }

                layerCount = br.ReadInt32();
                int[] inputSize = new int[3];
                for (int i = 0; i < 3; i++)
                {
                    inputSize[i] = br.ReadInt32();
                }

                inputLayer = new Input(inputSize);

                for (int i = 0; i < 3; i++)
                {
                    inputLayer.avgPixel[i] = br.ReadSingle();
                }

                inputLayer.setOutputDims();

                Layer layerChain   = inputLayer;
                Layer currentLayer = inputLayer;

                totalWeightCount = 0;
                totalBiasCount   = 0;

                List <Layer> layerList = new List <Layer>();
                layerList.Add(currentLayer);

                // Read layer records until the "EOF" marker is reached.
                bool endOfFile = false;
                while (!endOfFile)
                {
                    string layerT = br.ReadString();
                    if (layerT.Equals("conv"))
                    {
                        int[] pad = new int[4];
                        for (int i = 0; i < 4; i++)
                        {
                            pad[i] = br.ReadByte();
                        }

                        int[] inputTensorDims = currentLayer.outputDims;
                        Conv  cLayer          = new Conv(inputTensorDims, pad);
                        cLayer.useCBLAS = useMKLBLAS;

                        int[] dims = new int[4];
                        for (int i = 0; i < 4; i++)
                        {
                            dims[i] = br.ReadInt32();
                        }

                        for (int i = 0; i < 2; i++)
                        {
                            cLayer.stride[i] = br.ReadByte();
                        }

                        cLayer.weights = new Tensor(dims);
                        for (int i = 0; i < cLayer.weights.TotalLength; i++)
                        {
                            cLayer.weights.memPtr[i] = br.ReadSingle();
                        }
                        totalWeightCount += cLayer.weights.TotalLength;

                        cLayer.biases = new Tensor(new int[] { dims[3] });
                        for (int i = 0; i < cLayer.biases.TotalLength; i++)
                        {
                            cLayer.biases.memPtr[i] = br.ReadSingle();
                        }
                        totalBiasCount += cLayer.biases.TotalLength;

                        cLayer.setOutputDims();

                        currentLayer = cLayer;
                    }
                    else if (layerT.Equals("relu"))
                    {
                        ReLU rLayer = new ReLU(currentLayer.outputDims);
                        rLayer.setOutputDims();
                        currentLayer = rLayer;
                    }
                    else if (layerT.Equals("pool"))
                    {
                        int[] pad = new int[4];
                        for (int i = 0; i < 4; i++)
                        {
                            pad[i] = br.ReadByte();
                        }

                        Pool pLayer = new Pool(currentLayer.outputDims, pad);

                        for (int i = 0; i < 2; i++)
                        {
                            pLayer.pool[i] = br.ReadByte();
                        }

                        for (int i = 0; i < 2; i++)
                        {
                            pLayer.stride[i] = br.ReadByte();
                        }

                        pLayer.setOutputDims();
                        currentLayer = pLayer;
                    }
                    else if (layerT.Equals("softmax"))
                    {
                        classCount = br.ReadInt32();
                        string[] classes = new string[classCount];
                        for (int i = 0; i < classCount; i++)
                        {
                            classes[i] = br.ReadString();
                        }

                        SoftMax smLayer = new SoftMax(currentLayer.outputDims);
                        currentLayer.appendNext(smLayer);
                        outputLayer = new Output(smLayer.InputTensorDims, classes);
                        smLayer.appendNext(outputLayer);
                        layerList.Add(smLayer);
                        layerList.Add(outputLayer);
                        continue;
                    }
                    else if (layerT.Equals("EOF"))
                    {
                        endOfFile = true;
                        continue;
                    }
                    else
                    {
                        throw new Exception("The following layer is not implemented: " + layerT);
                    }

                    layerList.Add(currentLayer);
                    layerChain.appendNext(currentLayer);
                    layerChain = layerChain.nextLayer;
                }
                layers = layerList.ToArray();
            }
            catch (Exception)
            {
                // Re-throw so a failed load does not leave the network half-initialized;
                // the finally block below still closes the streams.
                throw;
            }
            finally
            {
                if (br != null)
                {
                    br.Close();
                }
                if (f != null)
                {
                    f.Close();
                }
            }
        }
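
A minimal usage sketch follows (the file name is a placeholder; the layers and classCount members it reads back are assumptions based on the fields the constructor above populates):

        // Hypothetical sketch: load a pre-trained network from a CeNiN binary model file.
        CNN network = new CNN("model.cenin");          // placeholder path
        Console.WriteLine("Layers loaded:  " + network.layers.Length);
        Console.WriteLine("Output classes: " + network.classCount);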