Ejemplo n.º 1
0
        /// <summary>
        /// Rewrites a protobuf file to another file, with specified modfications.
        /// </summary>
        /// <param name="fnOrg"></param>
        /// <param name="fnRewritten"></param>
        /// <param name="layerMap"></param>
        /// <param name="valueCalc"></param>
        /// <summary>
        /// Rewrites a protobuf network file to another file, applying a
        /// per-value transformation to one selected layer.
        /// </summary>
        /// <param name="fnOrg">Path of the source protobuf file to read.</param>
        /// <param name="fnRewritten">Path of the output file (created or overwritten).</param>
        /// <param name="layerMap">Selects the layer to modify from the loaded net.</param>
        /// <param name="valueCalc">Maps (weight index, current value) to the replacement value.</param>
        public static void RewriteLayerInNet(string fnOrg, string fnRewritten,
                                             Func <LC0ProtobufNet, Weights.Layer> layerMap,
                                             Func <int, float, float> valueCalc)
        {
            // Read from disk
            LC0ProtobufNet pbn = new LC0ProtobufNet(fnOrg);

            Weights.Layer layer = layerMap(pbn);

            // Get current layer values and rewrite each one via the supplied function.
            float[] values = ProtobufHelpers.GetLayerLinear16(layer);
            for (int i = 0; i < values.Length; i++)
            {
                values[i] = valueCalc(i, values[i]);
            }

            // Set value of this layer and serialize whole net to bytes
            ProtobufHelpers.SetLayerLinear16(layer, values);
            byte[] bytes = SerializationUtils.ProtoSerialize <Net>(pbn.Net);

            // Write to disk. WriteAllBytes creates the file or truncates and
            // overwrites an existing one, so no prior Exists/Delete is needed
            // (the old check-then-delete was redundant and racy).
            File.WriteAllBytes(fnRewritten, bytes);
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Gets a single layer vector within a speciifed layer.
        /// </summary>
        /// <param name="layer"></param>
        /// <param name="index"></param>
        /// <returns></returns>
        /// <summary>
        /// Gets a single weight value from a layer stored in linear
        /// 16-bit quantized format.
        /// </summary>
        /// <param name="layer">Layer whose raw parameter bytes are decoded.</param>
        /// <param name="index">Index of the weight to retrieve.</param>
        /// <returns>The dequantized weight value.</returns>
        public static float GetLayerLinear16Single(Weights.Layer layer, int index)
        {
            // Each weight occupies 2 bytes of Params, little-endian.
            byte lo = layer.Params[index * 2];
            byte hi = layer.Params[index * 2 + 1];

            // Reassemble the unsigned 16-bit quantized value, then map it
            // back onto the layer's [MinVal, MaxVal) range.
            float quantized = 256 * hi + lo;
            float scale     = (layer.MaxVal - layer.MinVal) / 65536.0f;

            return layer.MinVal + quantized * scale;
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Retrieves the weights values from a specified layer.
        /// </summary>
        /// <param name="layer"></param>
        /// <returns></returns>
        /// <summary>
        /// Retrieves all weight values from a layer stored in linear
        /// 16-bit quantized format.
        /// </summary>
        /// <param name="layer">Layer whose parameter bytes are decoded.</param>
        /// <returns>Array of dequantized weights, one per 2 bytes of Params.</returns>
        public static float[] GetLayerLinear16(Weights.Layer layer)
        {
            // Two bytes of Params per weight.
            int numWeights = layer.Params.Length / 2;

            float[] weights = new float[numWeights];
            for (int index = 0; index < numWeights; index++)
            {
                weights[index] = GetLayerLinear16Single(layer, index);
            }

            return weights;
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Sets a layer vector within a specified layer.
        /// </summary>
        /// <param name="layer"></param>
        /// <param name="values"></param>
        /// <summary>
        /// Sets all weight values in a layer stored in linear 16-bit quantized format.
        /// </summary>
        /// <param name="layer">Layer whose parameter bytes are rewritten.</param>
        /// <param name="values">Replacement values; must contain exactly one value
        /// per 2 bytes of the layer's Params array.</param>
        /// <exception cref="ArgumentException">Thrown if the number of values does not
        /// match the layer's parameter size.</exception>
        public static void SetLayerLinear16(Weights.Layer layer, float[] values)
        {
            // Each value occupies 2 bytes in Params, so sizes must correspond exactly.
            if (layer.Params.Length != values.Length * 2)
            {
                // ArgumentException (a subclass of Exception, so existing catch
                // blocks still match) with an actionable message instead of the
                // former bare System.Exception("not expected size").
                throw new ArgumentException($"Expected {layer.Params.Length / 2} values but received {values.Length}.",
                                            nameof(values));
            }

            for (int i = 0; i < values.Length; i++)
            {
                SetLayerLinear16(layer, i, values[i]);
            }
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Sets the weights values in a specified layer.
        /// </summary>
        /// <param name="layer"></param>
        /// <param name="index"></param>
        /// <param name="value"></param>
        /// <summary>
        /// Sets a single weight value in a layer stored in linear 16-bit quantized
        /// format. The value is clamped to [MinVal, MaxVal], quantized to 16 bits,
        /// and written little-endian into the layer's Params bytes.
        /// </summary>
        /// <param name="layer">Layer whose parameter bytes are updated.</param>
        /// <param name="index">Index of the weight to set.</param>
        /// <param name="value">Replacement value (clamped into the layer's range).</param>
        public static void SetLayerLinear16(Weights.Layer layer, int index, float value)
        {
            // Clamp into the representable range of this layer.
            if (value < layer.MinVal)
            {
                value = layer.MinVal;
            }
            if (value > layer.MaxVal)
            {
                value = layer.MaxVal;
            }

            float range = layer.MaxVal - layer.MinVal;

            float increment;
            if (range <= 0)
            {
                // Degenerate layer (MinVal == MaxVal): avoid 0/0 producing NaN;
                // every value decodes to MinVal, so encode as 0.
                increment = 0;
            }
            else
            {
                float width = range / 65536.0f;
                increment = MathF.Round((value - layer.MinVal) / width, 0);

                // BUGFIX: value == MaxVal rounds to 65536, which overflowed the
                // 16-bit encoding — (byte)(65536 / 256) wrapped to 0 and stored
                // MinVal instead. Clamp to the largest representable step.
                if (increment > 65535f)
                {
                    increment = 65535f;
                }
            }

            byte b0 = (byte)(increment % 256);
            byte b1 = (byte)(increment / 256);

            layer.Params[index * 2]     = b0;
            layer.Params[index * 2 + 1] = b1;
        }