Example #1
0
        private static void PackConstants(Model model, Dictionary <string, Layer> constantLayers)
        {
            // Fold a lone constant operand of each binary linear math layer into the
            // layer itself: the constant's datasets and weights are copied onto the
            // layer, and the constant is dropped from the layer's input list.
            foreach (var layer in model.layers)
            {
                if (!LinearLayerFusing.IsLayerLinearMathOp(layer))
                {
                    continue;
                }

                // Only the binary case with exactly one constant operand is handled.
                // @TODO fuse multi const inputs here
                var constNames = layer.inputs.Where(x => constantLayers.ContainsKey(x)).ToList();
                if (layer.inputs.Length != 2 || constNames.Count != 1)
                {
                    continue;
                }

                var constName  = constNames[0];
                var constLayer = constantLayers[constName];

                // Copy the constant's datasets and weight buffer into the fused layer.
                layer.datasets = new Layer.DataSet[constLayer.datasets.Length];
                Array.Copy(constLayer.datasets, layer.datasets, constLayer.datasets.Length);
                layer.weights = new BarracudaArray(constLayer.weights.Length);
                BarracudaArray.Copy(constLayer.weights, layer.weights, constLayer.weights.Length);

                // The constant is now baked into the layer; remove it as an input.
                layer.inputs = layer.inputs.Where(x => x != constName).ToArray();
            }
        }
Example #2
0
        /// <summary>
        /// Copies a Tensor's shape and contents into the dataset at `index`, writing
        /// the values into this layer's weight storage at the dataset's offset.
        /// </summary>
        /// <param name="X">input `Tensor` whose shape and data are applied</param>
        /// <param name="index">dataset index</param>
        public void ApplyTensorToDataSet(Tensor X, int index)
        {
            Assert.IsTrue(index < datasets.Length);

            // Update the entry's shape on a local copy, then write it back to the
            // array (required in case DataSet is a value type).
            var updated = datasets[index];
            updated.shape = X.shape;
            datasets[index] = updated;

            BarracudaArray.Copy(X.ToReadOnlyArray(), 0, weights, updated.offset, updated.shape.length);
        }
Example #3
0
 /// <summary>
 /// Converts every layer's weights to the given data type, in place.
 /// Layers without weights, or already stored in the target type, are skipped.
 /// </summary>
 /// <param name="type">target type for model weights</param>
 internal void ConvertWeights(DataType type)
 {
     foreach (var layer in layers)
     {
         // Nothing to do when weights are absent or already of the target type.
         if (layer.weights == null || layer.weights.Type == type)
         {
             continue;
         }

         // Allocate a buffer of the target type and copy across
         // (BarracudaArray.Copy handles the cross-type element copy).
         var converted = new BarracudaArray(layer.weights.Length, type);
         BarracudaArray.Copy(layer.weights, converted);
         layer.weights = converted;
     }
 }
Example #4
0
        private static void UnpackConstants(Model model)
        {
            // Extract the single fused constant of each linear math layer into a
            // dedicated Load layer (named "c" + layer name), wire it up as an extra
            // input, and strip the layer's own datasets/weights. New constants are
            // prepended so they appear before the layers that consume them.
            var extractedConstants = new List <Layer>();

            foreach (var layer in model.layers)
            {
                if (!LinearLayerFusing.IsLayerLinearMathOp(layer))
                {
                    continue;
                }
                if (layer.datasets == null || layer.datasets.Length != 1)
                {
                    continue;
                }

                var constName  = "c" + layer.name;
                var constLayer = new Layer(constName, Layer.Type.Load);

                // Clone the dataset descriptors and rename them after the new constant.
                constLayer.datasets = new Layer.DataSet[layer.datasets.Length];
                Array.Copy(layer.datasets, constLayer.datasets, layer.datasets.Length);
                for (int d = 0; d < constLayer.datasets.Length; ++d)
                {
                    constLayer.datasets[d].name = constName;
                }

                // Clone the weight buffer.
                constLayer.weights = new BarracudaArray(layer.weights.Length);
                BarracudaArray.Copy(layer.weights, constLayer.weights, layer.weights.Length);

                // Append the new constant as the layer's last input.
                Array.Resize(ref layer.inputs, layer.inputs.Length + 1);
                layer.inputs[layer.inputs.Length - 1] = constLayer.name;

                extractedConstants.Add(constLayer);

                // The layer no longer owns the constant data.
                layer.datasets = new Layer.DataSet[0];
                layer.weights  = new BarracudaArray(0);//TODO fp16
            }

            extractedConstants.AddRange(model.layers);
            model.layers = extractedConstants;
        }