Example #1: creating a forward sequence-to-sequence network (RNNSharp).
        /// <summary>
        /// Creates the network for a forward sequence-to-sequence model.
        /// Sparse feature size = source sparse feature size + target sparse feature size.
        /// Dense feature size: for the first layer, (source dense feature size + target dense feature size);
        /// for other layers, (source dense feature size + previous hidden layer size).
        /// </summary>
        /// <param name="hiddenLayersConfig">Configuration for each hidden layer</param>
        /// <param name="outputLayerConfig">Configuration for the output layer</param>
        /// <param name="TrainingSet">Training data; supplies the sparse/dense feature sizes and the tag set size</param>
        /// <param name="featurizer">Feature configuration, including the sequence-to-sequence auto-encoder</param>
        public override void CreateNetwork(List <LayerConfig> hiddenLayersConfig, LayerConfig outputLayerConfig, DataSet <T> TrainingSet, Config featurizer)
        {
            var srcDenseFeatureSize = featurizer.Seq2SeqAutoEncoder.GetTopHiddenLayerSize() * 2;
            var sparseFeatureSize   = TrainingSet.SparseFeatureSize;

            sparseFeatureSize += featurizer.Seq2SeqAutoEncoder.Config.SparseFeatureSize;
            Logger.WriteLine("Sparse Feature Format: [{0}][{1}] = {2}", TrainingSet.SparseFeatureSize, featurizer.Seq2SeqAutoEncoder.Config.SparseFeatureSize, sparseFeatureSize);

            HiddenLayerList = CreateLayers(hiddenLayersConfig);

            for (var i = 0; i < HiddenLayerList.Count; i++)
            {
                SimpleLayer layer = HiddenLayerList[i];
                layer.InitializeWeights(sparseFeatureSize, i == 0 ? (srcDenseFeatureSize + TrainingSet.DenseFeatureSize) : (srcDenseFeatureSize + HiddenLayerList[i - 1].LayerSize));
                layer.SetRunningMode(RunningMode.Training);

                Logger.WriteLine($"Create hidden layer {i}: size = {layer.LayerSize}, sparse feature size = {layer.SparseFeatureSize}, dense feature size = {layer.DenseFeatureSize}");
            }

            outputLayerConfig.LayerSize = TrainingSet.TagSize;
            OutputLayer = CreateOutputLayer(outputLayerConfig, 0, (srcDenseFeatureSize + HiddenLayerList[HiddenLayerList.Count - 1].LayerSize));
            OutputLayer.SetRunningMode(RunningMode.Training);

            Logger.WriteLine($"Create a Forward recurrent neural sequence-to-sequence network with {HiddenLayerList.Count} hidden layers");
        }
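A minimal usage sketch for the method above, assuming a caller that already has a training set and featurizer; the layer sizes and config values below are illustrative, not taken from the source:

        // Hypothetical setup: two LSTM hidden layers feeding a softmax output.
        var hiddenLayersConfig = new List<LayerConfig>
        {
            new LSTMLayerConfig { LayerSize = 200 },
            new LSTMLayerConfig { LayerSize = 200 }
        };
        var outputLayerConfig = new SoftmaxLayerConfig();

        // network is assumed to be an instance of the seq2seq class above;
        // trainingSet and featurizer come from the surrounding training pipeline.
        // Note that CreateNetwork sets outputLayerConfig.LayerSize itself.
        network.CreateNetwork(hiddenLayersConfig, outputLayerConfig, trainingSet, featurizer);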
Example #2: computing a middle bidirectional layer over a sequence.
        private void ComputeMiddleLayers(Sequence pSequence, SimpleLayer forwardLayer, SimpleLayer backwardLayer, RunningMode runningMode, int layerIdx)
        {
            var numStates = pSequence.States.Length;

            float[][] lastLayerOutputs = layersOutput[layerIdx - 1];

            //Computing forward RNN
            forwardLayer.Reset();
            for (var curState = 0; curState < numStates; curState++)
            {
                var state = pSequence.States[curState];
                forwardLayer.ForwardPass(state.SparseFeature, lastLayerOutputs[curState]);
                forwardCellList[layerIdx][curState] = forwardLayer.CopyNeuronTo(forwardCellList[layerIdx][curState]);
            }

            //Computing backward RNN
            backwardLayer.Reset();
            for (var curState = numStates - 1; curState >= 0; curState--)
            {
                var state = pSequence.States[curState];
                backwardLayer.ForwardPass(state.SparseFeature, lastLayerOutputs[curState]);
                backwardCellList[layerIdx][curState] = backwardLayer.CopyNeuronTo(backwardCellList[layerIdx][curState]);
            }

            //Merge forward and backward
            MergeForwardBackwardLayers(numStates, forwardLayer.LayerSize, layerIdx);
        }
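MergeForwardBackwardLayers is not shown in this listing. Below is a plausible sketch of the concatenation it performs, inferred from how layersOutput is consumed above; this is an assumption, not RNNSharp's actual implementation:

        // Hypothetical sketch: write forward and backward hidden states side by side
        // into layersOutput[layerIdx], so the next layer sees 2 * layerSize dense features.
        private void MergeForwardBackwardLayersSketch(int numStates, int layerSize, int layerIdx)
        {
            for (var curState = 0; curState < numStates; curState++)
            {
                var merged = layersOutput[layerIdx][curState];   // length 2 * layerSize
                Array.Copy(forwardCellList[layerIdx][curState].Cells, 0, merged, 0, layerSize);
                Array.Copy(backwardCellList[layerIdx][curState].Cells, 0, merged, layerSize, layerSize);
            }
        }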
Example #3: computing the bottom bidirectional layer from the raw sparse and dense features.
        /// <summary>
        ///     Computes the output of the bottom layer from the sequence's raw sparse and dense features.
        /// </summary>
        /// <param name="sequence">Input sequence</param>
        /// <param name="forwardLayer">Forward-direction layer</param>
        /// <param name="backwardLayer">Backward-direction layer</param>
        /// <param name="runningMode">Current running mode</param>
        private void ComputeBottomLayer(Sequence sequence, SimpleLayer forwardLayer, SimpleLayer backwardLayer, RunningMode runningMode)
        {
            var numStates = sequence.States.Length;

            //Computing forward RNN
            forwardLayer.Reset();
            for (var curState = 0; curState < numStates; curState++)
            {
                var state = sequence.States[curState];
                forwardLayer.ForwardPass(state.SparseFeature, state.DenseFeature.CopyTo());
                forwardCellList[0][curState] = forwardLayer.CopyNeuronTo(forwardCellList[0][curState]);
            }

            //Computing backward RNN
            backwardLayer.Reset();
            for (var curState = numStates - 1; curState >= 0; curState--)
            {
                var state = sequence.States[curState];
                backwardLayer.ForwardPass(state.SparseFeature, state.DenseFeature.CopyTo());
                backwardCellList[0][curState] = backwardLayer.CopyNeuronTo(backwardCellList[0][curState]);
            }

            //Merge forward and backward
            MergeForwardBackwardLayers(numStates, forwardLayer.LayerSize, 0);
        }
Example #4: a factory that builds hidden layers from their configurations.
        protected virtual List <SimpleLayer> CreateLayers(List <LayerConfig> hiddenLayersConfig)
        {
            var hiddenLayers = new List <SimpleLayer>();

            for (var i = 0; i < hiddenLayersConfig.Count; i++)
            {
                SimpleLayer layer = null;
                switch (hiddenLayersConfig[i].LayerType)
                {
                case LayerType.LSTM:
                    layer = new LSTMLayer(hiddenLayersConfig[i] as LSTMLayerConfig);
                    Logger.WriteLine("Create LSTM layer.");
                    break;

                case LayerType.DropOut:
                    layer = new DropoutLayer(hiddenLayersConfig[i] as DropoutLayerConfig);
                    Logger.WriteLine("Create Dropout layer.");
                    break;
                }

                hiddenLayers.Add(layer);
            }

            return(hiddenLayers);
        }
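The factory only knows the LSTM and Dropout layer types; any other LayerType leaves a null entry in the list. A short usage sketch; the layer sizes are illustrative and the DropoutRatio property name is assumed:

        // Illustrative stack: LSTM -> Dropout -> LSTM.
        var configs = new List<LayerConfig>
        {
            new LSTMLayerConfig { LayerSize = 300 },
            new DropoutLayerConfig { DropoutRatio = 0.5f },   // property name assumed
            new LSTMLayerConfig { LayerSize = 300 }
        };
        List<SimpleLayer> layers = CreateLayers(configs);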
Example #5: creating a bi-directional recurrent network.
        public override void CreateNetwork(List <LayerConfig> hiddenLayersConfig, LayerConfig outputLayerConfig, DataSet <T> TrainingSet, Config featurizer)
        {
            var forwardHiddenLayers  = CreateLayers(hiddenLayersConfig);
            var backwardHiddenLayers = CreateLayers(hiddenLayersConfig);

            for (var i = 0; i < hiddenLayersConfig.Count; i++)
            {
                SimpleLayer forwardLayer  = forwardHiddenLayers[i];
                SimpleLayer backwardLayer = backwardHiddenLayers[i];

                var denseFeatureSize = TrainingSet.DenseFeatureSize;
                if (i > 0)
                {
                    denseFeatureSize = forwardHiddenLayers[i - 1].LayerSize * 2;
                }

                forwardLayer.InitializeWeights(TrainingSet.SparseFeatureSize, denseFeatureSize);
                backwardLayer.InitializeWeights(TrainingSet.SparseFeatureSize, denseFeatureSize);

                forwardLayer.SetRunningMode(RunningMode.Training);
                backwardLayer.SetRunningMode(RunningMode.Training);

                Logger.WriteLine($"Create hidden layer {i}: size = {forwardLayer.LayerSize}, sparse feature size = {forwardLayer.SparseFeatureSize}, dense feature size = {forwardLayer.DenseFeatureSize}");
            }

            outputLayerConfig.LayerSize = TrainingSet.TagSize;
            SimpleLayer outputLayer = CreateOutputLayer(outputLayerConfig, TrainingSet.SparseFeatureSize, forwardHiddenLayers[forwardHiddenLayers.Count - 1].LayerSize * 2);

            outputLayer.SetRunningMode(RunningMode.Training);

            Logger.WriteLine($"Create a bi-directional recurrent neural network with {forwardHiddenLayers.Count} hidden layers. Forward and backward layers are concatnated.");
            InitCache(forwardHiddenLayers, backwardHiddenLayers, outputLayer);
        }
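The * 2 factors come from concatenating forward and backward outputs. A worked example of the resulting sizes, assuming a dense input of 100 and two hidden layers of size 200 (illustrative numbers):

        // Layer 0 dense input: TrainingSet.DenseFeatureSize             = 100
        // Layer 1 dense input: forwardHiddenLayers[0].LayerSize * 2     = 400
        // Output  dense input: last hidden layer's LayerSize * 2        = 400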
Example #6: creating the output layer by type.
        protected SimpleLayer CreateOutputLayer(LayerConfig outputLayerConfig, int sparseFeatureSize, int denseFeatureSize)
        {
            SimpleLayer outputLayer = null;

            switch (outputLayerConfig.LayerType)
            {
            case LayerType.SampledSoftmax:
                Logger.WriteLine("Create sampled softmax layer as output layer");
                outputLayer = new SampledSoftmaxLayer(outputLayerConfig as SampledSoftmaxLayerConfig);
                outputLayer.InitializeWeights(0, denseFeatureSize);
                break;

            case LayerType.Softmax:
                Logger.WriteLine("Create softmax layer as output layer.");
                outputLayer = new SoftmaxLayer(outputLayerConfig as SoftmaxLayerConfig);
                outputLayer.InitializeWeights(sparseFeatureSize, denseFeatureSize);
                break;

            case LayerType.Simple:
                Logger.WriteLine("Create simple layer as output layer.");
                outputLayer = new SimpleLayer(outputLayerConfig as SimpleLayerConfig);
                outputLayer.InitializeWeights(sparseFeatureSize, denseFeatureSize);
                break;
            }
            outputLayer.LabelShortList = new List <int>();

            return(outputLayer);
        }
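Note that the sampled-softmax branch passes 0 as the sparse feature size, so sparse features reach only the softmax and simple output layers. A minimal call; the sizes and the tagCount variable are hypothetical:

        var config = new SoftmaxLayerConfig { LayerSize = tagCount };   // tagCount is hypothetical
        SimpleLayer output = CreateOutputLayer(config, sparseFeatureSize: 50000, denseFeatureSize: 400);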
Example #7: loading a single layer from a binary stream.
        public static ILayer Load(LayerType layerType, BinaryReader br, bool forTraining = false)
        {
            ILayer layer = null;

            switch (layerType)
            {
            case LayerType.LSTM:
                layer = new LSTMLayer();
                break;

            case LayerType.DropOut:
                layer = new DropoutLayer();
                break;

            case LayerType.Softmax:
                layer = new SoftmaxLayer();
                break;

            case LayerType.SampledSoftmax:
                layer = new SampledSoftmaxLayer();
                break;

            case LayerType.Simple:
                layer = new SimpleLayer();
                break;
            }

            layer.Load(br, layerType, forTraining);

            return(layer);
        }
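A sketch of the surrounding read pattern; it mirrors how the LoadModel methods below consume the stream, with a hypothetical file name:

        using (var fs = File.OpenRead("model.bin"))            // file name is hypothetical
        using (var br = new BinaryReader(fs))
        {
            var layerType = (LayerType)br.ReadInt32();         // a type tag precedes each layer
            ILayer layer = Load(layerType, br, forTraining: false);
        }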
Example #8: loading a SimpleRNN model from disk.
        public override void LoadModel(string filename, bool bTrain = false)
        {
            Logger.WriteLine("Loading SimpleRNN model: {0}", filename);

            var sr = new StreamReader(filename);
            var br = new BinaryReader(sr.BaseStream);

            IsCRFTraining = br.ReadBoolean();

            //Create cells of each layer
            var       layerSize = br.ReadInt32();
            LayerType layerType = LayerType.None;

            HiddenLayerList = new List <SimpleLayer>();
            for (var i = 0; i < layerSize; i++)
            {
                layerType = (LayerType)br.ReadInt32();
                HiddenLayerList.Add(Load(layerType, br));

                SimpleLayer layer = HiddenLayerList[HiddenLayerList.Count - 1];
                if (bTrain)
                {
                    layer.SetRunningMode(RunningMode.Training);
                    layer.InitializeInternalTrainingParameters();
                }
                else
                {
                    layer.SetRunningMode(RunningMode.Test);
                }
            }

            Logger.WriteLine("Create output layer");
            layerType   = (LayerType)br.ReadInt32();
            OutputLayer = Load(layerType, br);

            if (bTrain)
            {
                OutputLayer.SetRunningMode(RunningMode.Training);
                OutputLayer.InitializeInternalTrainingParameters();
            }
            else
            {
                OutputLayer.SetRunningMode(RunningMode.Test);
            }

            if (IsCRFTraining)
            {
                Logger.WriteLine("Loading CRF tag trans weights...");
                CRFTagTransWeights = RNNHelper.LoadMatrix(br);
                crfLocker          = new object();
            }

            sr.Close();
        }
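Typical usage, with a hypothetical variable and file name; rnn stands for an instance of the forward RNN class this method belongs to:

        rnn.LoadModel("simple_rnn.model", bTrain: false);   // decoding mode
        // After loading, HiddenLayerList and OutputLayer are populated and
        // every layer runs in RunningMode.Test.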
Example #9: creating a plain forward recurrent network.
        public override void CreateNetwork(List <LayerConfig> hiddenLayersConfig, LayerConfig outputLayerConfig, DataSet <T> TrainingSet, Config featurizer)
        {
            HiddenLayerList = CreateLayers(hiddenLayersConfig);
            for (var i = 0; i < HiddenLayerList.Count; i++)
            {
                SimpleLayer layer = HiddenLayerList[i];
                layer.InitializeWeights(TrainingSet.SparseFeatureSize, i == 0 ? TrainingSet.DenseFeatureSize : HiddenLayerList[i - 1].LayerSize);
                layer.SetRunningMode(RunningMode.Training);

                Logger.WriteLine($"Create hidden layer {i}: size = {layer.LayerSize}, sparse feature size = {layer.SparseFeatureSize}, dense feature size = {layer.DenseFeatureSize}");
            }

            outputLayerConfig.LayerSize = TrainingSet.TagSize;
            OutputLayer = CreateOutputLayer(outputLayerConfig, TrainingSet.SparseFeatureSize, HiddenLayerList[HiddenLayerList.Count - 1].LayerSize);
            OutputLayer.SetRunningMode(RunningMode.Training);

            Logger.WriteLine($"Create a Forward recurrent neural network with {HiddenLayerList.Count} hidden layers");
        }
Example #10: starting a circle paint operation on a new SimpleLayer.
        public override PaintResult BeginPaint(PaintContext context, Point beginPoint)
        {
            if (context == null)
            {
                return(null);
            }

            if (_layers == null)
            {
                _layers = new List <LayerBase>();
            }
            else
            {
                _layers.Clear();
            }

            _paintContext = context;
            _geometry     = null;
            PaintResult result = new PaintResult();
            SimpleLayer layer  = new SimpleLayer();

            result.PaintLayerType = PaintLayerType.New;
            var circleTool = context.PaintTool as CircleTool;
            _geometry = new Circle(new CircleStyle()
            {
                FirstPoint  = beginPoint,
                SecondPoint = beginPoint,
                LineBrush   = circleTool.LineBrush,
                FillBrush   = circleTool.FillBrush,
                LineWidth   = circleTool.LineWidth
            }, new CircleAction());

            layer.AddGeometry(_geometry);
            _layers.Add(layer);
            result.Layers = _layers;

            context.OperationLayers.Clear();
            _layers.ForEach(m => context.OperationLayers.Add(m));

            NotifyLayerGroup(context, result);

            return(result);
        }
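A hedged usage sketch; PaintContext construction is application-specific, so painter, context, and the Point arguments below are assumptions based on how the method reads them:

        // context.PaintTool must be a CircleTool here, since BeginPaint
        // casts it unconditionally when building the CircleStyle.
        PaintResult result = painter.BeginPaint(context, new Point(10, 10));
        if (result != null)
        {
            // result.Layers now holds one SimpleLayer containing the new circle.
        }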
Example #11: starting a text paint operation; same pattern as Example #10, with a Text geometry.
        public override PaintResult BeginPaint(PaintContext context, Point beginPoint)
        {
            if (context == null)
            {
                return(null);
            }

            if (_layers == null)
            {
                _layers = new List <LayerBase>();
            }
            else
            {
                _layers.Clear();
            }

            _paintContext = context;
            PaintResult result = new PaintResult();

            result.PaintLayerType = PaintLayerType.New;

            SimpleLayer layer = new SimpleLayer();

            var textTool = context.PaintTool as TextTool;
            _geometry = new Text(new TextStyle()
            {
                FirstPoint  = beginPoint,
                SecondPoint = beginPoint,
                Font        = textTool.Font,
                Foreground  = textTool.Foreground,
            }, new TextAction());

            layer.AddGeometry(_geometry);
            _layers.Add(layer);
            result.Layers = _layers;

            context.OperationLayers.Clear();
            _layers.ForEach(m => context.OperationLayers.Add(m));

            NotifyLayerGroup(context, result);

            return(result);
        }
Example #12: a static loader that dispatches on layer type.
        public static SimpleLayer Load(LayerType layerType, BinaryReader br)
        {
            switch (layerType)
            {
            case LayerType.LSTM:
                return(LSTMLayer.Load(br, LayerType.LSTM));

            case LayerType.DropOut:
                return(DropoutLayer.Load(br, LayerType.DropOut));

            case LayerType.Softmax:
                return(SoftmaxLayer.Load(br, LayerType.Softmax));

            case LayerType.SampledSoftmax:
                return(SampledSoftmaxLayer.Load(br, LayerType.SampledSoftmax));

            case LayerType.Simple:
                return(SimpleLayer.Load(br, LayerType.Simple));
            }

            return(null);
        }
Example #13: loading a bi-directional model with forward and backward layer stacks.
        public override void LoadModel(string filename, bool bTrain = false)
        {
            Logger.WriteLine(Logger.Level.info, "Loading bi-directional model: {0}", filename);

            using (var sr = new StreamReader(filename))
            {
                var br = new BinaryReader(sr.BaseStream);

                IsCRFTraining = br.ReadBoolean();
                var       layerSize = br.ReadInt32();
                LayerType layerType = LayerType.None;

                //Load forward layers from file
                forwardHiddenLayers = new List <SimpleLayer>();
                for (var i = 0; i < layerSize; i++)
                {
                    layerType = (LayerType)br.ReadInt32();
                    forwardHiddenLayers.Add(Load(layerType, br));

                    SimpleLayer layer = forwardHiddenLayers[forwardHiddenLayers.Count - 1];
                    if (bTrain)
                    {
                        layer.SetRunningMode(RunningMode.Training);
                        layer.InitializeInternalTrainingParameters();
                    }
                    else
                    {
                        layer.SetRunningMode(RunningMode.Test);
                    }
                }

                //Load backward layers from file
                backwardHiddenLayers = new List <SimpleLayer>();
                for (var i = 0; i < layerSize; i++)
                {
                    layerType = (LayerType)br.ReadInt32();
                    backwardHiddenLayers.Add(Load(layerType, br));

                    SimpleLayer layer = backwardHiddenLayers[backwardHiddenLayers.Count - 1];
                    if (bTrain)
                    {
                        layer.SetRunningMode(RunningMode.Training);
                        layer.InitializeInternalTrainingParameters();
                    }
                    else
                    {
                        layer.SetRunningMode(RunningMode.Test);
                    }
                }

                Logger.WriteLine("Create output layer");
                layerType   = (LayerType)br.ReadInt32();
                OutputLayer = Load(layerType, br);

                if (bTrain)
                {
                    OutputLayer.SetRunningMode(RunningMode.Training);
                    OutputLayer.InitializeInternalTrainingParameters();
                }
                else
                {
                    OutputLayer.SetRunningMode(RunningMode.Test);
                }

                if (IsCRFTraining)
                {
                    Logger.WriteLine("Loading CRF tag trans weights...");
                    CRFWeights = RNNHelper.LoadMatrix(br);
                }

                if (bTrain)
                {
                    InitCache(forwardHiddenLayers, backwardHiddenLayers, OutputLayer.CreateLayerSharedWegiths());
                }
            }
        }
Example #14: preallocating the per-sequence neuron and error caches.
        public void InitCache(List <SimpleLayer> s_forwardRNN, List <SimpleLayer> s_backwardRNN, SimpleLayer outputLayer)
        {
            forwardHiddenLayers  = s_forwardRNN;
            backwardHiddenLayers = s_backwardRNN;

            //Initialize output layer
            OutputLayer = outputLayer;

            forwardCellList  = new List <Neuron[]>();
            backwardCellList = new List <Neuron[]>();
            fErrLayers       = new List <float[][]>();
            bErrLayers       = new List <float[][]>();

            for (int i = 0; i < numOfLayers; i++)
            {
                var forwardCells  = new Neuron[MaxSeqLength];
                var backwardCells = new Neuron[MaxSeqLength];
                var fErrLayer     = new float[MaxSeqLength][];
                var bErrLayer     = new float[MaxSeqLength][];

                for (int j = 0; j < MaxSeqLength; j++)
                {
                    if (forwardHiddenLayers[i] is DropoutLayer)
                    {
                        forwardCells[j]  = new DropoutNeuron();
                        backwardCells[j] = new DropoutNeuron();

                        ((DropoutNeuron)forwardCells[j]).mask  = new bool[forwardHiddenLayers[i].LayerSize];
                        ((DropoutNeuron)backwardCells[j]).mask = new bool[forwardHiddenLayers[i].LayerSize];
                    }
                    else if (forwardHiddenLayers[i] is LSTMLayer)
                    {
                        var lstmForwardCell  = new LSTMNeuron();
                        var lstmBackwardCell = new LSTMNeuron();

                        lstmForwardCell.LSTMCells  = new LSTMCell[forwardHiddenLayers[i].LayerSize];
                        lstmBackwardCell.LSTMCells = new LSTMCell[forwardHiddenLayers[i].LayerSize];

                        for (int k = 0; k < forwardHiddenLayers[i].LayerSize; k++)
                        {
                            lstmForwardCell.LSTMCells[k]  = new LSTMCell();
                            lstmBackwardCell.LSTMCells[k] = new LSTMCell();
                        }

                        forwardCells[j]  = lstmForwardCell;
                        backwardCells[j] = lstmBackwardCell;
                    }
                    else
                    {
                        forwardCells[j]  = new Neuron();
                        backwardCells[j] = new Neuron();
                    }

                    forwardCells[j].Cells           = new float[forwardHiddenLayers[i].LayerSize];
                    forwardCells[j].PrevCellOutputs = new float[forwardHiddenLayers[i].LayerSize];

                    backwardCells[j].Cells           = new float[forwardHiddenLayers[i].LayerSize];
                    backwardCells[j].PrevCellOutputs = new float[forwardHiddenLayers[i].LayerSize];

                    fErrLayer[j] = new float[forwardHiddenLayers[i].LayerSize];
                    bErrLayer[j] = new float[forwardHiddenLayers[i].LayerSize];
                }

                forwardCellList.Add(forwardCells);
                backwardCellList.Add(backwardCells);
                fErrLayers.Add(fErrLayer);
                bErrLayers.Add(bErrLayer);
            }

            InitOutputLayerCache();
        }
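The cache is sized for the longest supported sequence, so the per-token forward and backward passes during training allocate nothing. A back-of-the-envelope sketch of what one hidden layer costs; the numbers are illustrative:

        // Per hidden layer, InitCache allocates MaxSeqLength forward neurons and
        // MaxSeqLength backward neurons, each with two float[LayerSize] buffers
        // (Cells, PrevCellOutputs), plus MaxSeqLength error rows per direction.
        // E.g. MaxSeqLength = 128, LayerSize = 200: 128 * 200 floats per buffer,
        // times 6 buffers per time step across both directions and the error arrays.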