Example #1
 public FPNFeatureExpander(HybridBlock network, string[] outputs, int[] num_filters, bool use_1x1 = true, bool use_upsample = true,
                           bool use_elewadd = true, bool use_p6        = false, bool p6_conv = true, bool no_bias       = true,
                           bool pretrained  = false, string norm_layer = null, FuncArgs norm_kwargs = null, Context ctx = null, string[] inputs = null, ParameterDict @params = null) : base(null, null, @params)
 {
     var (p_i, p_o, p_p) = __internal__.ParseNetwork(network, outputs, inputs, pretrained, ctx);
     Construct(p_o, p_i, p_p, num_filters, use_1x1, use_upsample, use_elewadd, use_p6, p6_conv, no_bias, pretrained, norm_layer, norm_kwargs, ctx);
 }
Example #2
        public static (Symbol, SymbolList, ParameterDict) ParseNetwork(HybridBlock network, string[] outputs, string[] inputs, bool pretrained, Context ctx)
        {
            // default to the conventional "data" input when the caller passes null
            inputs = inputs ?? new string[] { "data" };
            var    inputSymbols = inputs.Select(x => Symbol.Var(x)).ToArray();
            Symbol input        = null;

            if (inputSymbols.Length == 1)
            {
                input = inputSymbols[0];
            }
            else
            {
                input = Symbol.Group(inputSymbols);
            }

            ParameterDict @params = network.CollectParams();
            var           prefix  = network.Prefix;

            input = network.Call(input);

            Debug.Assert(outputs.Length > 0, "At least one output must be specified.");
            outputs = (from @out in outputs
                       select @out.EndsWith("_output") ? @out : @out + "_output").ToArray();
            var outputSymbols = (from @out in outputs
                                 select input[prefix + @out]).ToArray();

            return (input, outputSymbols, @params);
        }
Example #3
        public FeatureExtractor(HybridBlock network, string[] outputs, string[] inputs = null, bool pretrained = false, Context ctx = null, ParameterDict @params = null) : base(null, null, @params)
        {
            if (ctx == null)
            {
                ctx = mx.Cpu();
            }

            var (p_i, p_o, p_p) = __internal__.ParseNetwork(network, outputs, inputs, pretrained, ctx);
            base.Construct(p_o, p_i, p_p); // SymbolBlock takes (outputs, inputs, params), matching the FeatureExpander call below
        }
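
Usage note: a minimal sketch of driving the pair above (hypothetical names; the backbone loader and the layer names "relu4_3"/"relu7" are illustrative, not part of this API). ParseNetwork appends the "_output" suffix automatically, so "relu4_3" and "relu4_3_output" are interchangeable.

        // Hypothetical usage sketch; LoadPretrainedBackbone is an assumed helper.
        HybridBlock backbone = LoadPretrainedBackbone();
        var extractor = new FeatureExtractor(
            backbone,
            outputs: new[] { "relu4_3", "relu7" }, // internal symbol names of the backbone
            inputs:  new[] { "data" },
            pretrained: true,
            ctx: mx.Cpu());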
Example #4
 public FasterRCNN(
     HybridBlock features,
     HybridBlock top_features,
     string[] classes,
     HybridBlock box_features = null,
     int @short                = 600,
     int max_size              = 1000,
     int min_stage             = 4,
     int max_stage             = 4,
     string train_patterns     = "",
     float nms_thresh          = 0.3f,
     int nms_topk              = 400,
     int post_nms              = 100,
     string roi_mode           = "align",
     (int, int)? roi_size      = null,
Example #5
 public BBoxArea(int axis = -1, string fmt = "corner", string prefix = "", ParameterDict @params = null) : base(prefix, @params)
 {
     if (fmt.ToLower() == "corner")
     {
         this._pre = new BBoxCornerToCenter(split: true);
     }
     else if (fmt.ToLower() == "center")
     {
         this._pre = new BBoxSplit(axis: axis);
     }
     else
     {
         throw new Exception($"Unsupported format: {fmt}. Use 'corner' or 'center'.");
     }
 }
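
For reference, "corner" means (xmin, ymin, xmax, ymax) and "center" means (x, y, width, height); per box the block reduces to width times height. A plain-scalar sketch of that arithmetic (illustrative only; BBoxArea itself operates on NDArray/Symbol tensors of shape (..., 4)):

 // Scalar sketch of the per-box area computation.
 float xmin = 10f, ymin = 20f, xmax = 50f, ymax = 60f; // corner format
 float width  = xmax - xmin;                           // 40
 float height = ymax - ymin;                           // 40
 float area   = width * height;                        // 1600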
Example #6
 public BBoxBatchIOU(int axis = -1, string fmt = "corner", int offset = 0, float eps = 1e-15f, string prefix = null, ParameterDict @params = null) : base(prefix, @params)
 {
     this._offset = offset;
     this._eps    = eps;
     if (fmt.ToLower() == "center")
     {
         this._pre = new BBoxCenterToCorner(split: true);
     }
     else if (fmt.ToLower() == "corner")
     {
         this._pre = new BBoxSplit(axis: axis, squeeze_axis: true);
     }
     else
     {
         throw new Exception($"Unsupported format: {fmt}. Use 'corner' or 'center'.");
     }
 }
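
Likewise, the IoU computed per box pair is intersection area over union area, with eps guarding the division (and offset added to widths/heights for integer pixel coordinates). A scalar sketch of one pair (illustrative; BBoxBatchIOU broadcasts this over batched tensors):

 // Scalar IoU sketch for two corner-format boxes, offset = 0.
 float eps = 1e-15f;
 float[] a = { 0f, 0f, 4f, 4f }, b = { 2f, 2f, 6f, 6f };               // (xmin, ymin, xmax, ymax)
 float iw = Math.Max(0f, Math.Min(a[2], b[2]) - Math.Max(a[0], b[0])); // 2
 float ih = Math.Max(0f, Math.Min(a[3], b[3]) - Math.Max(a[1], b[1])); // 2
 float inter = iw * ih;                                                // 4
 float union = (a[2] - a[0]) * (a[3] - a[1])
             + (b[2] - b[0]) * (b[3] - b[1]) - inter;                  // 16 + 16 - 4 = 28
 float iou = inter / (union + eps);                                    // ~0.143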
Example #7
        public FeatureExpander(HybridBlock network, string[] outputs, int[] num_filters, bool use_1x1_transition = true,
                               bool use_bn     = true, float reduce_ratio = 1, int min_depth      = 128, bool global_pool       = false,
                               bool pretrained = false, Context ctx       = null, string[] inputs = null, ParameterDict @params = null) : base(null, null, @params)
        {
            var (p_i, p_o, p_p) = __internal__.ParseNetwork(network, outputs, inputs, pretrained, ctx);
            // append extra stages after the last parsed output: an optional 1x1 transition
            // conv, then a stride-2 3x3 conv per requested filter count
            var y           = p_o.Last();
            var weight_init = new Xavier(rnd_type: "gaussian", factor_type: "out", magnitude: 2);

            foreach (var (i, f) in num_filters.Select((filter, index) => (index, filter)))
            {
                if (use_1x1_transition)
                {
                    var num_trans = Math.Max(min_depth, Convert.ToInt32(Math.Round(f * reduce_ratio)));

                    y = sym.Convolution(y, weight_init.InitWeight("weight"), null, num_filter: num_trans, kernel: new Shape(1, 1), no_bias: use_bn, symbol_name: $"expand_trans_conv{i}");

                    if (use_bn)
                    {
                        y = sym.BatchNorm(y, null, null, null, null, symbol_name: $"expand_trans_bn{i}");
                    }

                    y = sym.Activation(y, act_type: ActivationType.Relu, symbol_name: $"expand_trans_relu{i}");
                }

                y = sym.Convolution(y, weight_init.InitWeight("weight"), null, num_filter: f, kernel: new Shape(3, 3), pad: new Shape(1, 1), stride: new Shape(2, 2), no_bias: use_bn, symbol_name: $"expand_conv{i}");
                if (use_bn)
                {
                    y = sym.BatchNorm(y, null, null, null, null, symbol_name: $"expand_bn{i}");
                }

                y = sym.Activation(y, act_type: ActivationType.Relu, symbol_name: $"expand_relu{i}");
                p_o.Add(y);
            }
            if (global_pool)
            {
                p_o.Add(sym.Pooling(y, pool_type: PoolingType.Avg, global_pool: true, kernel: new Shape(1, 1)));
            }

            base.Construct(p_o, p_i, p_p);
        }
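
The transition width in the loop above is clamped from below by min_depth; a quick sketch of that rule with illustrative filter counts:

        // Sketch: channels of the 1x1 transition conv preceding each stride-2 3x3 conv.
        int min_depth = 128;
        float reduce_ratio = 1.0f;
        foreach (int f in new[] { 512, 256, 128, 64 })
        {
            int num_trans = Math.Max(min_depth, Convert.ToInt32(Math.Round(f * reduce_ratio)));
            Console.WriteLine($"f={f} -> transition channels {num_trans}");
            // 512 -> 512, 256 -> 256, 128 -> 128, 64 -> 128 (clamped by min_depth)
        }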
Example #8
 public FPNFeatureExpander(HybridBlock network, string[] outputs, int num_filters, bool use_1x1 = true, bool use_upsample = true,
                           bool use_elewadd = true, bool use_p6             = false, bool p6_conv = true, bool no_bias = true,
                           bool pretrained  = false, HybridBlock norm_layer = null, FuncArgs norm_kwargs = null, Context ctx = null, string[] inputs = null, ParameterDict @params = null) : base(null, null, @params)
 {
     throw new NotImplementedException();
 }
Example #9
 public AlphaPose(HybridBlock preact, int num_joints, string norm_layer = "BatchNorm", FuncArgs norm_kwargs = null, string prefix = null, ParameterDict @params = null) : base(prefix, @params)
 {
     throw new NotImplementedException();
 }
Example #10
 public FeatureExpander(HybridBlock network, string[] outputs, int num_filters, bool use_1x1_transition = true,
                        bool use_bn     = true, float reduce_ratio = 1, int min_depth      = 128, bool global_pool       = false,
                        bool pretrained = false, Context ctx       = null, string[] inputs = null, ParameterDict @params = null) : base(null, null, @params)
 {
 }
Example #11
 private HybridSequential MakeLayer(HybridBlock block, int[] layers, int[] channels, int stride, int stage_index, int in_channels = 0, string norm_layer = "", bool last_gamma = false)
 {
     throw new NotImplementedException();
 }
Example #12
 public static HybridSequential AddBlock(HybridSequential @out, HybridBlock block)
 {
     throw new NotImplementedException();
 }
Example #13
 public Bottleneck(int planes, int inplanes, HybridBlock downsample = null, bool reduction = false, string norm_layer = "BatchNorm", FuncArgs kwargs = null, string prefix = "", ParameterDict @params = null) : base(prefix, @params)
 {
     throw new NotImplementedException();
 }
Example #14
 public YOLO3DefaultTrainTransform(int width, int height, HybridBlock net = null, (float, float, float)? mean = null, (float, float, float)? std = null, float iou_thresh = 0.5f)
 {
     throw new NotImplementedException();
 }
Example #15
 public MaskRCNN(HybridBlock features, HybridBlock top_features, string[] classes, int mask_channels = 256, int rcnn_max_dets = 1000, int rpn_test_pre_nms = 6000, int rpn_test_post_nms = 1000, int target_roi_scale = 1, int num_fcn_convs = 0, string norm_layer = "", FuncArgs norm_kwargs = null, string prefix = null, ParameterDict @params = null) : base(features: features, top_features: top_features, classes: classes, rpn_test_pre_nms: rpn_test_pre_nms, rpn_test_post_nms: rpn_test_post_nms, additional_output: true, prefix: prefix, @params: @params)
 {
     throw new NotImplementedException();
 }
Example #16
 private HybridSequential MakeLayer(HybridBlock block, int[] layers, int[] channels, int stride, int stage_index, int in_channels = 0, string norm_layer = "BatchNorm", FuncArgs norm_kwargs = null)
 {
     throw new NotImplementedException();
 }
Example #17
 public BottleneckV1b(int planes, int strides = 1, int dilation = 1, HybridBlock downsample = null, int previous_dilation = 1, string norm_layer = "", FuncArgs norm_kwargs = null, string prefix = null, ParameterDict @params = null) : base(prefix, @params)
 {
     throw new NotImplementedException();
 }
Example #18
 public CenterNet(HybridBlock base_network, Dictionary<string, Dictionary<string, object>> heads, int classes, int head_conv_channel = 0, float scale = 4, int topk = 100, bool flip_test = false, int nms_thresh = 0, int nms_topk = 400, int post_nms = 100, string prefix = null, ParameterDict @params = null) : base(prefix, @params)
 {
     throw new NotImplementedException();
 }
Example #19
 public RCNN(HybridBlock features, HybridBlock top_features, string[] classes, HybridBlock box_features, int @short, int max_size, string train_patterns,
             float nms_thresh, int nms_topk, int post_nms, string roi_mode, (int, int) roi_size, (int, int) strides, float? clip, bool force_nms = false, string prefix = null, ParameterDict @params = null) : base(prefix, @params)
 {
     throw new NotImplementedException();
 }
Example #20
 public SE_ResNetV1(HybridBlock block, int[] layers, int[] channels, int classes = 1000, bool thumbnail = false, string norm_layer = "BatchNorm", FuncArgs norm_kwargs = null, string prefix = null, ParameterDict @params = null) : base(prefix, @params)
 {
     throw new NotImplementedException();
 }
Example #21
 public FeatureExtractor(HybridBlock network, string[] outputs, string[] inputs = null, bool pretrained = false, ParameterDict @params = null) : base(null, null, @params)
 {
     throw new NotImplementedException();
 }
Example #22
 public FasterRCNNTrainBatchify(HybridBlock net, int num_shards = 1)
 {
     throw new NotImplementedException();
 }
Example #23
 public FasterRCNNDefaultTrainTransform(int @short = 600, int max_size = 1000, HybridBlock net = null, (float, float, float)? mean = null, (float, float, float)? std = null, (float, float, float)? box_norm = null, int num_sample = 256, float pos_iou_thresh = 0.7f, float neg_iou_thresh = 0.3f, float pos_ratio = 0.5f, float flip_p = 0.5f, int ashape = 128, bool multi_stage = false)
 {
     throw new NotImplementedException();
 }
Example #24
 public CIFARResNetV2(HybridBlock block, int[] layers, int[] channels, int classes = 10, string norm_layer = "BatchNorm", FuncArgs norm_kwargs = null, string prefix = "", ParameterDict @params = null) : base(prefix, @params)
 {
     throw new NotImplementedException();
 }
Example #25
 public static void ExportBlock(string path, HybridBlock block, Shape data_shape = null, int epoch = 0, bool preprocess = true, string layout = "HWC", Context ctx = null)
 {
     throw new NotImplementedException();
 }
Example #26
 public Tree(int levels, HybridBlock block, int in_channels, int out_channels, int stride = 1, bool level_root = false, int root_dim = 0, int root_kernel_size = 1,
             int dilation = 1, bool root_residual = false, string norm_layer = "BatchNorm", FuncArgs norm_kwargs = null, string prefix = null, ParameterDict @params = null) : base(prefix, @params)
 {
     throw new NotImplementedException();
 }
Example #27
 public static (SymbolList, SymbolList, ParameterDict) ParseNetwork(HybridBlock network, string[] outputs, string[] inputs, bool pretrained, Context ctx)
 {
     throw new NotImplementedException();
 }
Example #28
 public HybridSequential MakeLayer(HybridBlock block, int planes, int blocks, int stride = 1)
 {
     throw new NotImplementedException();
 }
Example #29
        public static void ExportBlock(string path, HybridBlock block, Shape data_shape = null, int epoch = 0, HybridBlock preprocess = null, string layout = "HWC", Context ctx = null)
        {
            if (ctx == null)
            {
                ctx = mx.Cpu();
            }

            NDArray          x;
            int              t;
            int              c;
            int              w;
            int              h;
            HybridSequential wrapper_block;
            List<Shape>      data_shapes = new List<Shape>();

            // input image layout
            layout = layout.ToUpper();
            if (data_shape == null)
            {
                if (layout == "HWC")
                {
                    data_shapes = (from s in new int[] { 224, 256, 299, 300, 320, 416, 512, 600 }
                                   select new Shape(s, s, 3)).ToList();
                }
                else if (layout == "CHW")
                {
                    data_shapes = (from s in new int[] { 224, 256, 299, 300, 320, 416, 512, 600 }
                                   select new Shape(3, s, s)).ToList();
                }
                else
                {
                    throw new Exception("Unable to predict data_shape, please specify.");
                }
            }
            else
            {
                data_shapes.Add(data_shape);
            }

            if (preprocess != null)
            {
                wrapper_block = new HybridSequential();
                preprocess.Initialize(ctx: new Context[] { ctx });
                wrapper_block.Add(preprocess);
                wrapper_block.Add(block);
            }
            else
            {
                // no preprocess block to prepend; wrap the network alone
                wrapper_block = new HybridSequential();
                wrapper_block.Add(block);
                Debug.Assert(new string[] { "CHW", "CTHW" }.Contains(layout), $"Default layout is CHW for 2D models and CTHW for 3D models if preprocess is None, provided {layout}.");
            }

            wrapper_block.CollectParams().ResetCtx(ctx);
            // try different data_shape if possible, until one fits the network
            object last_exception = null;

            foreach (var dshape in data_shapes)
            {
                if (layout == "HWC")
                {
                    (h, w, c) = dshape;
                    x         = nd.Zeros(new Shape(1, h, w, c), ctx: ctx);
                }
                else if (layout == "CHW")
                {
                    (c, h, w) = dshape;
                    x         = nd.Zeros(new Shape(1, c, h, w), ctx: ctx);
                }
                else if (layout == "THWC")
                {
                    (t, h, w, c) = dshape;
                    x            = nd.Zeros(new Shape(1, t, h, w, c), ctx: ctx);
                }
                else if (layout == "CTHW")
                {
                    (c, t, h, w) = dshape;
                    x            = nd.Zeros(new Shape(1, c, t, h, w), ctx: ctx);
                }
                else
                {
                    throw new Exception(String.Format("Input layout {0} is not supported yet.", layout));
                }

                // hybridize and forward once
                wrapper_block.Hybridize();
                try
                {
                    wrapper_block.Call(x);
                    wrapper_block.Export(path, epoch);
                    last_exception = null;
                    break;
                }
                catch (MXNetException ex)
                {
                    last_exception = ex;
                }
            }
            if (last_exception != null)
            {
                throw new Exception(last_exception.ToString().Split('\n')[0]);
            }
        }
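
A hypothetical call site (path and network are placeholders; BuildAndInitializeNet is an assumed helper). With an explicit CHW data_shape the probing loop runs exactly once, and a successful forward pass writes a "-symbol.json" plus an epoch-stamped ".params" file via Export:

        // Hypothetical usage sketch of the exporter above.
        HybridBlock net = BuildAndInitializeNet();   // assumed helper, not part of this API
        ExportBlock("./export/resnet", net,
                    data_shape: new Shape(3, 224, 224),
                    epoch: 0,
                    preprocess: null,                // layout must then be CHW or CTHW
                    layout: "CHW",
                    ctx: mx.Cpu());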