Code Example #1
        // Folds the single constant operand of a two-input linear math op into the
        // layer's own datasets/weights so the linear-layer fuser can consume it.
        private static void PackConstantsForMathOps(Model model, Dictionary<string, Layer> constantLayers)
        {
            for (int l = 0; l < model.layers.Count; ++l)
            {
                var layer = model.layers[l];

                if (!LinearLayerFusing.IsLayerLinearMathOp(layer))
                {
                    continue;
                }
                var constInputs = layer.inputs.Count(x => constantLayers.ContainsKey(x));
                // @TODO: fuse multiple const inputs here
                if (!(layer.inputs.Length == 2 && constInputs == 1))
                {
                    continue;
                }

                var constInput = layer.inputs.ToList().Find(x => constantLayers.ContainsKey(x));

                layer.datasets = new Layer.DataSet[constantLayers[constInput].datasets.Length];
                Array.Copy(constantLayers[constInput].datasets, layer.datasets, constantLayers[constInput].datasets.Length);
                layer.weights = new BarracudaArray(constantLayers[constInput].weights.Length);
                BarracudaArray.Copy(constantLayers[constInput].weights, layer.weights, constantLayers[constInput].weights.Length);

                layer.axis = constantLayers[constInput].axis; // holds the constant's rank; TODO: name the field correctly

                model.layers[l].inputs = layer.inputs.Where(x => x != constInput).ToArray();
            }
        }
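For context, here is a minimal sketch (not part of the original source) of how a caller might build the constantLayers map that PackConstantsForMathOps expects. It assumes the usual System.Linq using and that ModelOptimizer.IsLayerConstant (seen in Code Example #4) is accessible from the calling scope:

        // Collect every constant layer by name so math ops can look up their operands.
        private static Dictionary<string, Layer> CollectConstantLayers(Model model)
        {
            return model.layers
                        .Where(ModelOptimizer.IsLayerConstant)
                        .ToDictionary(layer => layer.name);
        }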
Code Example #2
        // Reverses PackConstantsForMathOps: re-extracts each inlined constant into a
        // standalone Load layer and prepends the new constants to the model.
        private static void UnpackConstantsForMathOps(Model model)
        {
            List<Layer> newConstants = new List<Layer>();

            for (int l = 0; l < model.layers.Count; ++l)
            {
                var layer = model.layers[l];
                if (!LinearLayerFusing.IsLayerLinearMathOp(layer))
                {
                    continue;
                }

                if (layer.datasets == null || layer.datasets.Length != 1)
                {
                    continue;
                }

                var   name       = "c" + layer.name;
                Layer constInput = new Layer(name, Layer.Type.Load);

                constInput.datasets = new Layer.DataSet[layer.datasets.Length];
                Array.Copy(layer.datasets, constInput.datasets, layer.datasets.Length);
                for (int d = 0; d < constInput.datasets.Length; ++d)
                {
                    constInput.datasets[d].name = name;
                }

                constInput.weights = new BarracudaArray(layer.weights.Length);
                BarracudaArray.Copy(layer.weights, constInput.weights, layer.weights.Length);

                constInput.axis = layer.axis; // holds the constant's rank; TODO: rename the field

                Array.Resize(ref layer.inputs, layer.inputs.Length + 1);
                layer.inputs[layer.inputs.Length - 1] = constInput.name;

                newConstants.Add(constInput);

                layer.datasets = new Layer.DataSet[0];
                layer.weights  = new BarracudaArray(0);//TODO fp16
            }
            newConstants.AddRange(model.layers);
            model.layers = newConstants;
        }
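Read together, Code Examples #1 and #2 form a pack / fuse / unpack sequence around the linear-layer fuser. A hedged sketch of that orchestration follows (the wrapper name and the inner fusing step are hypothetical; only IsLayerLinearMathOp is confirmed by the listings above):

        // Hypothetical wrapper; the actual fusing entry point is not shown in the listings.
        private static void FuseLinearMathOps(Model model)
        {
            var constants = CollectConstantLayers(model); // sketch after Code Example #1
            PackConstantsForMathOps(model, constants);    // inline constant operands
            // ... run the LinearLayerFusing pass over `model` here ...
            UnpackConstantsForMathOps(model);             // restore standalone Load layers
        }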
Code Example #3
        // Constant-folds layers whose values are known at import time: Shape layers with
        // fully known input shapes, and any layer whose inputs are all known constants.
        private void FuseShapesIntoConstants(ref Model model, IDictionary<string, TensorShape?> shapesByName, IDictionary<string, int?> ranksByName, ref List<Model.ImporterWarning> warnings)
        {
            var toRunnableNCHW = new IntermediateToRunnableNCHWPass();

            var knownLayersValue = new Dictionary<string, Tensor>();
            var newKnownLayers   = new HashSet<string>();
            var keepLayers       = new HashSet<string>();

            for (int l = 0; l < model.layers.Count; ++l)
            {
                var layer = model.layers[l];
                if (layer.flags == Layer.Flags.Preserve)
                {
                    keepLayers.Add(layer.name);
                }

                // The NN is a directed graph: when constants and shapes have just been
                // fused, update the downstream nodes and re-evaluate shapes.
                FuseInputsIntoLayer(ref layer, knownLayersValue, ranksByName, warnings);
                // TODO optimization, pass in index, or add shape
                IRShapeInferenceHelper.RankInference.UpdateKnownTensorRanks(model, ranksByName);
                IRShapeInferenceHelper.ShapeInference.UpdateKnownTensorShapesNCHW(model, ranksByName, ref shapesByName);

                if (ModelOptimizer.IsLayerConstant(layer))
                {
                    knownLayersValue[layer.name] = new Tensor(layer.datasets[0].shape, layer.weights);
                }
                else if (layer.type == Layer.Type.Shape)
                {
                    // assert inputs.Length == 1
                    var input = layer.inputs[0];
                    if (shapesByName.ContainsKey(input) && shapesByName[input] != null &&
                        ranksByName.ContainsKey(input) && ranksByName[input] != null
                        )
                    {
                        var shape = shapesByName[input].Value;
                        var rank  = ranksByName[input].Value;
                        knownLayersValue[layer.name] = ShapeToNCHWTensor(shape, rank);
                        newKnownLayers.Add(layer.name);
                        continue;
                    }
                }

                bool allInputsAreKnown = layer.inputs.Length > 0 && knownLayersValue.ContainsKey(layer.inputs[0]);
                for (int i = 1; i < layer.inputs.Length; i++)
                {
                    allInputsAreKnown &= knownLayersValue.ContainsKey(layer.inputs[i]);
                }

                // the layer can only be baked when every input value is known
                if (!allInputsAreKnown)
                {
                    continue;
                }

                var layerInputs = new Dictionary<string, Tensor>();
                var opsModel    = new Model();
                opsModel.layout = "iNCHW";
                for (int i = 0; i < layer.inputs.Length; i++)
                {
                    Model.Input input;
                    input.name  = layer.inputs[i];
                    input.shape = shapesByName[input.name].Value.ToArray();
                    input.rank  = ranksByName[input.name].Value;

                    opsModel.inputs.Add(input);
                    layerInputs[input.name] = knownLayersValue[input.name];
                }
                Layer newLayer = new Layer(layer.name.ToString(), layer.activation);
                newLayer.type       = layer.type;
                newLayer.activation = layer.activation;
                newLayer.pad        = layer.pad.ToArray();
                newLayer.stride     = layer.stride.ToArray();
                newLayer.pool       = layer.pool.ToArray();
                newLayer.axis       = layer.axis;
                newLayer.alpha      = layer.alpha;
                newLayer.beta       = layer.beta;
                newLayer.inputs     = layer.inputs.ToArray();
                newLayer.datasets   = layer.datasets;
                newLayer.weights    = layer.weights;
                if (layer.outputs != null)
                {
                    newLayer.outputs = layer.outputs.ToArray();
                }
                if (layer.axes != null)
                {
                    newLayer.axes = layer.axes.ToArray();
                }

                opsModel.layers.Add(newLayer);
                opsModel.outputs.Add(newLayer.name);

                toRunnableNCHW.Run(ref opsModel);

                // bake
                var useCPUforBaking = WorkerFactory.Device.CPU;
                using (var worker = WorkerFactory.CreateWorker(opsModel, useCPUforBaking))
                {
                    var bakedConstant = worker.Execute(layerInputs).CopyOutput();
                    knownLayersValue[layer.name] = bakedConstant;
                    newKnownLayers.Add(layer.name);
                }
            }

            // remove new baked layers since we will insert constants for those
            model.layers.RemoveAll(x => newKnownLayers.Contains(x.name) && !keepLayers.Contains(x.name));

            // TODO use ModelBuilder?
            foreach (var l in newKnownLayers)
            {
                if (keepLayers.Contains(l))
                {
                    continue;
                }

                var   name   = l;
                var   tensor = knownLayersValue[name];
                Layer c      = new Layer(name, Layer.Type.Load);

                c.datasets                    = new Layer.DataSet[1];
                c.datasets[0].name            = name;
                c.datasets[0].shape           = tensor.shape;
                c.datasets[0].itemSizeInBytes = 4;
                c.datasets[0].length          = tensor.shape.length;
                c.datasets[0].offset          = 0;

                c.axis = ranksByName[c.name].Value;

                c.weights = new BarracudaArray(tensor.length);
                BarracudaArray.Copy(tensor.ToReadOnlyArray(), c.weights, tensor.length);
                model.layers.Insert(0, c);
            }

            foreach (var l in knownLayersValue)
            {
                l.Value.Dispose();
            }

            // TODO remove?
            // remove unused constants
            var removeUnusedLayersPass = new Cleanup.RemoveUnusedLayersPass();

            removeUnusedLayersPass.Run(ref model);
        }
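This pass and Code Example #4 share the same "bake" idiom: wrap a single layer in a throwaway model, execute it once on the CPU, and keep the output as a constant. Below is a sketch of that idiom distilled into a helper (the name is hypothetical; the body mirrors the loops above minus the NCHW conversion pass):

        // Bake one layer: build a single-layer model over known input tensors,
        // run it on the CPU, and return the output. The caller owns the tensor.
        private static Tensor BakeLayer(Layer layer, Dictionary<string, Tensor> knownInputs)
        {
            var opsModel = new Model();
            foreach (var inputName in layer.inputs)
            {
                Model.Input input;
                input.name  = inputName;
                input.shape = knownInputs[inputName].shape.ToArray();
                input.rank  = knownInputs[inputName].shape.dimensions;
                opsModel.inputs.Add(input);
            }
            opsModel.layers.Add(layer);
            opsModel.outputs.Add(layer.name);

            using (var worker = WorkerFactory.CreateWorker(opsModel, WorkerFactory.Device.CPU))
            {
                return worker.Execute(knownInputs).CopyOutput();
            }
        }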
Code Example #4
        // Simpler variant of Code Example #3: folds any layer whose inputs are all
        // known constants by baking it once on the CPU.
        public static void FuseConstants(ref Model model)
        {
            var knownLayersValue = new Dictionary<string, Tensor>();
            var newKnownLayers   = new HashSet<string>();
            var keepLayers       = new HashSet<string>();

            for (int l = 0; l < model.layers.Count; ++l)
            {
                var layer = model.layers[l];
                if (layer.flags == Layer.Flags.Preserve)
                {
                    keepLayers.Add(layer.name);
                }

                // The NN is a directed graph: when constants have just been fused,
                // update the downstream nodes.
                // TODO optimization, pass in index, or add shape
                if (ModelOptimizer.IsLayerConstant(layer))
                {
                    knownLayersValue[layer.name] = new Tensor(layer.datasets[0].shape, layer.weights);
                }

                bool allInputsAreKnown = layer.inputs.Length > 0 && knownLayersValue.ContainsKey(layer.inputs[0]);
                for (int i = 1; i < layer.inputs.Length; i++)
                {
                    allInputsAreKnown &= knownLayersValue.ContainsKey(layer.inputs[i]);
                }

                // the layer can only be baked when every input value is known
                if (!allInputsAreKnown)
                {
                    continue;
                }

                var layerInputs = new Dictionary<string, Tensor>();
                var opsModel    = new Model();
                for (int i = 0; i < layer.inputs.Length; i++)
                {
                    Model.Input input;
                    input.name  = layer.inputs[i];
                    input.shape = knownLayersValue[input.name].shape.ToArray();
                    input.rank  = knownLayersValue[input.name].shape.dimensions;

                    opsModel.inputs.Add(input);
                    layerInputs[input.name] = knownLayersValue[input.name];
                }
                opsModel.layers.Add(layer);
                opsModel.outputs.Add(layer.name);

                // bake
                var useCPUforBaking = WorkerFactory.Device.CPU;
                using (var worker = WorkerFactory.CreateWorker(opsModel, useCPUforBaking))
                {
                    // TODO use ModelIR2RunnableNCHWPass
                    var bakedConstant = worker.Execute(layerInputs).PeekOutput();
                    bakedConstant.TakeOwnership();
                    knownLayersValue[layer.name] = bakedConstant;
                    newKnownLayers.Add(layer.name);
                }
            }

            // remove new baked layers since we will insert constants for those
            model.layers.RemoveAll(x => newKnownLayers.Contains(x.name) && !keepLayers.Contains(x.name));

            // TODO use ModelBuilder?
            foreach (var l in newKnownLayers)
            {
                if (keepLayers.Contains(l))
                {
                    continue;
                }

                var   name   = l;
                var   tensor = knownLayersValue[name];
                Layer c      = new Layer(name, Layer.Type.Load);

                c.datasets                    = new Layer.DataSet[1];
                c.datasets[0].name            = name;
                c.datasets[0].shape           = tensor.shape;
                c.datasets[0].itemSizeInBytes = 4;
                c.datasets[0].length          = tensor.shape.length;
                c.datasets[0].offset          = 0;

                c.axis = tensor.shape.dimensions;

                c.weights = new BarracudaArray(tensor.length);
                BarracudaArray.Copy(tensor.ToReadOnlyArray(), c.weights, tensor.length);
                model.layers.Insert(0, c);
            }

            // clear allocated tensors
            foreach (var l in knownLayersValue)
            {
                l.Value.Dispose();
            }

            // remove unused constants
            var removeUnusedLayersPass = new Cleanup.RemoveUnusedLayersPass();

            removeUnusedLayersPass.Run(ref model);
        }
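Since FuseConstants is public and mutates the model in place, a call site can stay small. A usage sketch (modelAsset is a hypothetical NNModel reference; ModelLoader.Load is the standard Barracuda loader):

        // Load a serialized model, fold constant subgraphs, and return the result.
        public static Model LoadAndFoldConstants(NNModel modelAsset)
        {
            var model = ModelLoader.Load(modelAsset);
            FuseConstants(ref model);
            return model;
        }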
Code Example #5
 // Copies the entire source array into destinationArray starting at destinationIndex.
 public static void CopyToBarracudaArray(this float[] sourceArray, BarracudaArray destinationArray, long destinationIndex)
 {
     BarracudaArray.Copy(sourceArray, 0, destinationArray, (int)destinationIndex, sourceArray.Length);
 }
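A small usage sketch for the extension (sizes and values are illustrative):

 // Pack two float[] blocks back to back into one BarracudaArray.
 var weights = new BarracudaArray(6);
 new float[] { 1f, 2f, 3f }.CopyToBarracudaArray(weights, 0);
 new float[] { 4f, 5f, 6f }.CopyToBarracudaArray(weights, 3);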