public BBoxArea(int axis = -1, string fmt = "corner", string prefix = "", ParameterDict @params = null)
    : base(prefix, @params)
{
    if (fmt.ToLower() == "corner")
    {
        this._pre = new BBoxCornerToCenter(split: true);
    }
    else if (fmt.ToLower() == "center")
    {
        this._pre = new BBoxSplit(axis: axis);
    }
    else
    {
        throw new Exception($"Unsupported format: {fmt}. Use 'corner' or 'center'.");
    }
}
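// A hedged usage sketch (not part of the source above): the variable names are illustrative.
// "corner" boxes are conventionally (xmin, ymin, xmax, ymax) and are converted to center form
// first; "center" boxes are (x, y, w, h) and are only split. Any other fmt value throws.
var areaFromCorners = new BBoxArea(fmt: "corner");
var areaFromCenters = new BBoxArea(fmt: "center", axis: -1);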
public GRUCell(int hidden_size, string i2h_weight_initializer = null, string h2h_weight_initializer = null,
    string i2h_bias_initializer = "zeros", string h2h_bias_initializer = "zeros", int input_size = 0,
    string prefix = null, ParameterDict @params = null)
    : base(prefix, @params)
{
    _hidden_size = hidden_size;
    _input_size = input_size;

    // A GRU has three gates (reset, update, new), so the fused weights and biases
    // hold 3 * hidden_size rows.
    this["i2h_weight"] = Params.Get("i2h_weight", shape: new Shape(3 * hidden_size, input_size),
        init: Initializer.Get(i2h_weight_initializer), allow_deferred_init: true);
    this["h2h_weight"] = Params.Get("h2h_weight", shape: new Shape(3 * hidden_size, hidden_size),
        init: Initializer.Get(h2h_weight_initializer), allow_deferred_init: true);
    this["i2h_bias"] = Params.Get("i2h_bias", shape: new Shape(3 * hidden_size),
        init: Initializer.Get(i2h_bias_initializer), allow_deferred_init: true);
    this["h2h_bias"] = Params.Get("h2h_bias", shape: new Shape(3 * hidden_size),
        init: Initializer.Get(h2h_bias_initializer), allow_deferred_init: true);
}
public BasicBlockV2(int channels, int stride, bool downsample = false, int in_channels = 0,
    string prefix = null, ParameterDict @params = null)
    : base(prefix, @params)
{
    bn1 = new BatchNorm();
    conv1 = ResNet.Conv3x3(channels, stride, in_channels);
    bn2 = new BatchNorm();
    // The second conv consumes the output of the first, so its input channel count is `channels`.
    conv2 = ResNet.Conv3x3(channels, 1, channels);
    if (downsample)
    {
        ds = new Conv2D(channels, (1, 1), (stride, stride), use_bias: false, in_channels: in_channels);
    }
    else
    {
        ds = null;
    }
}
public DefaultPreprocess(string prefix = "", ParameterDict @params = null)
    : base(prefix, @params)
{
    var mean = nd.Array(new float[] { 123.675f, 116.28f, 103.53f }).Reshape(1, 1, 1, 3);
    var scale = nd.Array(new float[] { 58.395f, 57.12f, 57.375f }).Reshape(1, 1, 1, 3);
    this["init_mean"] = Params.GetConstant("init_mean", mean);
    this["init_scale"] = Params.GetConstant("init_scale", scale);
}
public Dense(int units, ActivationType? activation = null, bool use_bias = true, bool flatten = true,
    DType dtype = null, Initializer weight_initializer = null, string bias_initializer = "zeros",
    int in_units = 0, string prefix = null, ParameterDict @params = null)
    : base(prefix, @params)
{
    Units = units;
    Act = activation != null ? new Activation(activation.Value) : null;
    UseBias = use_bias;
    Flatten_ = flatten;
    DataType = dtype;

    this["weight"] = Params.Get("weight", OpGradReq.Write, new Shape(units, in_units), dtype,
        init: weight_initializer, allow_deferred_init: true);
    if (UseBias)
    {
        this["bias"] = Params.Get("bias", OpGradReq.Write, new Shape(units), dtype,
            init: Initializer.Get(bias_initializer), allow_deferred_init: true);
    }
}
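// A hedged usage sketch (not part of the source above): the variable names are illustrative.
// Leaving in_units at 0 keeps the "weight" shape deferred until the first forward pass,
// which is why the parameter is registered with allow_deferred_init: true.
var fc = new Dense(units: 256, activation: ActivationType.Relu, in_units: 512);
var fcDeferred = new Dense(units: 10);                   // weight shape inferred later
var fcNoBias = new Dense(units: 10, use_bias: false);    // no "bias" parameter registered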
public LSTMCell(int hidden_size, string activation = "tanh", string recurrent_activation = "sigmoid",
    string i2h_weight_initializer = null, string h2h_weight_initializer = null,
    string i2h_bias_initializer = "zeros", string h2h_bias_initializer = "zeros", int input_size = 0,
    string prefix = null, ParameterDict @params = null)
    : base(prefix, @params)
{
    _hidden_size = hidden_size;
    _activation = activation;
    _input_size = input_size;
    _recurrent_activation = recurrent_activation;

    // An LSTM has four gates (input, forget, cell, output), so the fused weights and biases
    // hold 4 * hidden_size rows.
    this["i2h_weight"] = Params.Get("i2h_weight", shape: new Shape(4 * hidden_size, input_size),
        init: Initializer.Get(i2h_weight_initializer), allow_deferred_init: true);
    this["h2h_weight"] = Params.Get("h2h_weight", shape: new Shape(4 * hidden_size, hidden_size),
        init: Initializer.Get(h2h_weight_initializer), allow_deferred_init: true);
    this["i2h_bias"] = Params.Get("i2h_bias", shape: new Shape(4 * hidden_size),
        init: Initializer.Get(i2h_bias_initializer), allow_deferred_init: true);
    this["h2h_bias"] = Params.Get("h2h_bias", shape: new Shape(4 * hidden_size),
        init: Initializer.Get(h2h_bias_initializer), allow_deferred_init: true);
}
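// A hedged usage sketch (not part of the source above): the variable names are illustrative.
// input_size may be left at 0 so the i2h_weight shape is inferred from the first input.
var gru = new GRUCell(hidden_size: 128, input_size: 64);
var lstm = new LSTMCell(hidden_size: 128, activation: "tanh", recurrent_activation: "sigmoid");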
public CTCLoss(string layout = "NTC", string label_layout = "NT", float? weight = null, int? batch_axis = 0,
    string prefix = "", ParameterDict @params = null)
    : base(weight, batch_axis, prefix, @params)
{
    if (layout != "NTC" && layout != "TNC")
    {
        throw new ArgumentException($"Only 'NTC' and 'TNC' layouts for pred are supported. Got: {layout}");
    }

    if (label_layout != "NT" && label_layout != "TN")
    {
        throw new ArgumentException($"Only 'NT' and 'TN' layouts for label are supported. Got: {label_layout}");
    }

    Layout = layout;
    LabelLayout = label_layout;
    BatchAxis = label_layout.ToCharArray().ToList().IndexOf('N');
}
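// A hedged usage sketch (not part of the source above): the variable names are illustrative.
// BatchAxis is derived from where 'N' sits in label_layout, so "NT" yields 0 and "TN" yields 1.
var ctcBatchFirst = new CTCLoss(layout: "NTC", label_layout: "NT");   // BatchAxis == 0
var ctcTimeFirst = new CTCLoss(layout: "TNC", label_layout: "TN");    // BatchAxis == 1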
public static BlockScope Create(string prefix, ParameterDict @params, string hint)
{
    var current = BlockScope._current?.Value;
    if (current == null)
    {
        if (prefix == null)
        {
            if (_name.NameManager._current.Value == null)
            {
                _name.NameManager._current.Value = new _name.NameManager();
            }

            prefix = _name.NameManager._current.Value.Get(null, hint) + "_";
        }

        if (@params == null)
        {
            @params = new ParameterDict(prefix);
        }
public ResNetV2(string block, int[] layers, int[] channels, int classes = 1000, bool thumbnail = false,
    string prefix = "", ParameterDict @params = null)
    : base(prefix, @params)
{
    if (layers.Length != channels.Length - 1)
    {
        throw new Exception("layers.Length should be equal to channels.Length - 1");
    }

    Features = new HybridSequential("features");
    Features.Add(new BatchNorm(scale: false, center: false));
    if (thumbnail)
    {
        Features.Add(ResNet.Conv3x3(channels[0], 1, 0));
    }
    else
    {
        Features.Add(new Conv2D(channels[0], (7, 7), (2, 2), (3, 3), use_bias: false));
        Features.Add(new BatchNorm());
        Features.Add(new Activation(ActivationType.Relu));
        Features.Add(new MaxPool2D((3, 3), (2, 2), (1, 1)));
    }

    var in_channels = channels[0];
    for (var i = 0; i < layers.Length; i++)
    {
        var stride = i == 0 ? 1 : 2;
        var num_layer = layers[i];
        Features.Add(MakeLayer(block, num_layer, channels[i + 1], stride, i + 1, in_channels));
        in_channels = channels[i + 1];
    }

    Features.Add(new BatchNorm());
    Features.Add(new Activation(ActivationType.Relu));
    Features.Add(new GlobalAvgPool2D());
    Features.Add(new Flatten());

    Output = new Dense(classes, in_units: in_channels);

    RegisterChild(Features, "features");
    RegisterChild(Output, "output");
}
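// A hedged usage sketch (not part of the source above): the variable name, and the
// "basic_block" string accepted by MakeLayer, are assumptions; the layers/channels values
// follow the common resnet18 spec, with channels holding one more entry than layers as the
// constructor's check requires.
var resnet18v2 = new ResNetV2(
    block: "basic_block",
    layers: new[] { 2, 2, 2, 2 },
    channels: new[] { 64, 64, 128, 256, 512 },
    classes: 1000);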
/// <summary>
/// Starts computing. Called while rendering an animation.
/// </summary>
public void Run(int updateSteps)
{
    _currentProgress = 0;
    _master.Progress(_currentProgress);
    System.Diagnostics.Debug.WriteLine("PaintJob.Run " + updateSteps.ToString());
    _parameters = ParameterDict.Current.Clone();
    _updateSteps = updateSteps;
    _currentProgressd = 100.0 / (double)(_updateSteps);
    for (int i = 0; i < _updateSteps; i++)
    {
        if (_abort)
        {
            return;
        }

        _iterate = new Iterate(_parameters, this, false);
        if (_lastIterate != null)
        {
            _iterate.SetOldData(_lastIterate.GraphicInfo, _lastIterate.PictureData, i);
        }

        if (_abort)
        {
            return;
        }

        _iterate.StartAsync();
        _iterate.Wait();
        if (_abort)
        {
            return;
        }

        _lastIterate = _iterate;
        _currentProgress += _currentProgressd;
        _master.Progress(_currentProgress);
    }

    Renderer renderer = PictureArtFactory.Create(_iterate.PictureData, _iterate.LastUsedFormulas, ParameterDict.Current.Clone());
    renderer.Paint(_graphics);
    if (_abort)
    {
        return;
    }

    _master.Progress(0);
}
public BottleneckV2(int channels, int stride, bool downsample = false, int in_channels = 0,
    string prefix = null, ParameterDict @params = null)
    : base(prefix, @params)
{
    var channel_one_fourth = Convert.ToInt32(channels / 4);
    bn1 = new BatchNorm();
    conv1 = new Conv2D(channel_one_fourth, (1, 1), (1, 1), use_bias: false);
    bn2 = new BatchNorm();
    conv2 = ResNet.Conv3x3(channel_one_fourth, stride, channel_one_fourth);
    bn3 = new BatchNorm();
    // Only the 3x3 conv applies the stride; the final 1x1 conv keeps stride 1 and takes the
    // 3x3 conv's output (channel_one_fourth channels) as input.
    conv3 = new Conv2D(channels, (1, 1), (1, 1), use_bias: false, in_channels: channel_one_fourth);
    if (downsample)
    {
        ds = new Conv2D(channels, (1, 1), (stride, stride), use_bias: false, in_channels: in_channels);
    }
    else
    {
        ds = null;
    }
}
public BasicBlockV1(int channels, int stride, bool downsample = false, int in_channels = 0,
    string prefix = null, ParameterDict @params = null)
    : base(prefix, @params)
{
    body = new HybridSequential("");
    body.Add(ResNet.Conv3x3(channels, stride, in_channels));
    body.Add(new BatchNorm());
    body.Add(new Activation(ActivationType.Relu));
    // The second conv consumes the output of the first, so its input channel count is `channels`.
    body.Add(ResNet.Conv3x3(channels, 1, channels));
    body.Add(new BatchNorm());
    if (downsample)
    {
        ds = new HybridSequential();
        ds.Add(new Conv2D(channels, (1, 1), (stride, stride), use_bias: false, in_channels: in_channels));
        ds.Add(new BatchNorm());
    }
    else
    {
        ds = null;
    }
}
public BatchNorm(int axis = 1, float momentum = 0.9f, float epsilon = 1e-5f, bool center = true, bool scale = true,
    bool use_global_stats = false, string beta_initializer = "zeros", string gamma_initializer = "ones",
    string running_mean_initializer = "zeros", string running_variance_initializer = "ones",
    int in_channels = 0, string prefix = null, ParameterDict @params = null)
    : base(prefix, @params)
{
    Axis = axis;
    Momentum = momentum;
    Epsilon = epsilon;
    Center = center;
    Scale = scale;
    Use_Global_Stats = use_global_stats;
    In_Channels = in_channels;

    this["gamma"] = Params.Get("gamma", scale ? OpGradReq.Write : OpGradReq.Null, new Shape(in_channels),
        init: Initializer.Get(gamma_initializer), allow_deferred_init: true, differentiable: scale);
    this["beta"] = Params.Get("beta", center ? OpGradReq.Write : OpGradReq.Null, new Shape(in_channels),
        init: Initializer.Get(beta_initializer), allow_deferred_init: true, differentiable: center);
    this["running_mean"] = Params.Get("running_mean", OpGradReq.Null, new Shape(in_channels),
        init: Initializer.Get(running_mean_initializer), allow_deferred_init: true, differentiable: false);
    this["running_var"] = Params.Get("running_var", OpGradReq.Null, new Shape(in_channels),
        init: Initializer.Get(running_variance_initializer), allow_deferred_init: true, differentiable: false);
}
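// A hedged usage sketch (not part of the source above): the variable names are illustrative.
// gamma/beta are registered with OpGradReq.Null and differentiable: false when scale/center
// are disabled, so they stay in the ParameterDict but are never updated.
var bn = new BatchNorm(in_channels: 64);                    // trainable gamma and beta
var bnFrozenStats = new BatchNorm(use_global_stats: true);  // always use running mean/var
var bnNoAffine = new BatchNorm(scale: false, center: false);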
public _Conv(int channels, int[] kernel_size, int[] strides, int[] padding, int[] dilation, int groups, string layout,
    int in_channels = 0, ActivationType? activation = null, bool use_bias = true,
    Initializer weight_initializer = null, string bias_initializer = "zeros", int[] adj = null,
    string op_name = "Convolution", string prefix = "", ParameterDict @params = null)
    : base(prefix, @params)
{
    NumFilter = channels;
    InChannels = in_channels;
    Strides = strides;
    Padding = padding;
    Dialation = dilation;
    _op_name = op_name;
    KernalSize = kernel_size;
    NumGroup = groups;
    Layout = layout;
    Activation = activation.HasValue ? new Activation(activation.Value) : null;
    UseBias = use_bias;
    WeightInitializer = weight_initializer;
    BiasInitializer = bias_initializer;
    Adj = adj;

    // Build a dummy data shape (batch = 1, channels = in_channels) to infer the weight and bias
    // shapes for the underlying convolution operator.
    var dshape = new int[kernel_size.Length + 2];
    dshape[layout.ToCharArray().ToList().IndexOf('N')] = 1;
    dshape[layout.ToCharArray().ToList().IndexOf('C')] = in_channels;
    var wshapes = _infer_weight_shape(op_name, new Shape(dshape));

    this["weight"] = Params.Get("weight", OpGradReq.Write, wshapes[1],
        init: weight_initializer, allow_deferred_init: true);
    if (UseBias)
    {
        this["bias"] = Params.Get("bias", OpGradReq.Write, wshapes[2],
            init: Initializer.Get(bias_initializer), allow_deferred_init: true);
    }
    else
    {
        this["bias"] = null;
    }
}
void RemoveUnusedParameters(string tomoSource)
{
    lock (lockRemoveUnusedParameters)
    {
        try
        {
            if (tomoSource.Contains("SetParameterBulk"))
            {
                return;
            }

            Dictionary<string, bool> usedParameters = UsedParameters(tomoSource);
            List<string> entriesToDelete = new List<string>();
            foreach (KeyValuePair<string, string> entry in ParameterDict.Current.Entries)
            {
                if (entry.Key.StartsWith("Formula.Parameters."))
                {
                    if (!ParameterDict.IsAdditionalInfo(entry.Key))
                    {
                        string parameterName = entry.Key.Substring("Formula.Parameters.".Length);
                        if (!usedParameters.ContainsKey(parameterName))
                        {
                            entriesToDelete.Add(parameterName);
                        }
                    }
                }
            }

            foreach (string entryToDelete in entriesToDelete)
            {
                ParameterDict.Current.RemoveProperty("Formula.Parameters." + entryToDelete);
            }
        }
        catch (System.InvalidOperationException)
        {
            // Thrown if enumeration is not possible because ParameterDict.Current.Entries
            // was changed by another thread.
        }
    }
}
public AlexNet(int classes = 1000, string prefix = "", ParameterDict @params = null)
    : base(prefix, @params)
{
    Features = new HybridSequential(prefix);
    Features.Add(new Conv2D(64, (11, 11), (4, 4), (2, 2), activation: ActivationType.Relu));
    Features.Add(new MaxPool2D((3, 3), (2, 2)));
    Features.Add(new Conv2D(192, (5, 5), padding: (2, 2), activation: ActivationType.Relu));
    Features.Add(new MaxPool2D((3, 3), (2, 2)));
    Features.Add(new Conv2D(384, (3, 3), padding: (1, 1), activation: ActivationType.Relu));
    Features.Add(new Conv2D(256, (3, 3), padding: (1, 1), activation: ActivationType.Relu));
    Features.Add(new Conv2D(256, (3, 3), padding: (1, 1), activation: ActivationType.Relu));
    Features.Add(new MaxPool2D((3, 3), (2, 2)));
    Features.Add(new Flatten());
    Features.Add(new Dense(4096, ActivationType.Relu));
    Features.Add(new Dropout(0.5f));
    Features.Add(new Dense(4096, ActivationType.Relu));
    Features.Add(new Dropout(0.5f));

    Output = new Dense(classes);

    RegisterChild(Features);
    RegisterChild(Output);
}
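// A hedged usage sketch (not part of the source above): the variable names are illustrative.
// Only the final Dense layer depends on `classes`, so adapting the head to another label set
// is a single constructor argument.
var imagenetAlexNet = new AlexNet(classes: 1000);
var tenClassAlexNet = new AlexNet(classes: 10, prefix: "alexnet_");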
public Dropout(float rate, Shape axes = null, string prefix = null, ParameterDict @params = null) : base(prefix, @params) { Rate = rate; Axes = axes; }
public AvgPoolPad(string prefix = null, ParameterDict @params = null) : base(prefix, @params) { }
public ActionRecInceptionV3(int nclass, bool pretrained_base = true, bool partial_bn = true, float dropout_ratio = 0.8f, float init_std = 0.001f, int feat_dim = 2048, int num_segments = 1, int num_crop = 1, string prefix = null, ParameterDict @params = null) : base(prefix, @params) { throw new NotImplementedException(); }
private static ISessionApiRequest<GetMessagesReply> GenericGetMessages(
    string methodName, Method httpMethod, ParameterDict parameters)
{
    // {"response":[4680,{"body":"thank you...<br>yeah...it'll pass....","title":"Re: ...",
    //  "date":1268213453,"uid":28672529,"mid":10836,"read_state":0},{"body":"nothing for)",
    //  "title":"Re: ...","date":1268213443,"uid":58416643,"mid":10835,"read_state":0}]}
    return new SessionApiRequest<GetMessagesReply>(
        methodName,
        httpMethod,
        parameters,
        json => new GetMessagesReply
        {
            TotalCount = json.First().ToObject<int>(),
            Messages = from message in json.Skip(1)
                       select message.ToObject<VkMessage>()
        });
}
/// <summary>
/// Generates the compressed formula.
/// The formula parameters and the formula itself are combined into a compact text,
/// which can later be pasted into the formula text window to restore the current
/// formula configuration.
/// </summary>
public static string GenerateCompressedFormulaAndViewSettings()
{
    string formula = ParameterDict.Current["Intern.Formula.Source"];

    List<string> formulaSettingCategories = new List<string>();
    formulaSettingCategories.Add("Scene");
    formulaSettingCategories.Add("Transformation.Camera");
    formulaSettingCategories.Add("Transformation.Perspectice");
    formulaSettingCategories.Add("Formula");
    formulaSettingCategories.Add("Renderer");
    formulaSettingCategories.Add("Renderer.BackColor");
    formulaSettingCategories.Add("Renderer.ColorFactor");
    formulaSettingCategories.Add("Renderer.Light");

    // To make the new settings unique.
    ParameterDict.Current["intern.Formula.TempUpdateVal"] = "vv";
    string testHash = ParameterDict.Current.GetHash("");

    string insertSettingsStringHere = "base.Init();";
    if (!formula.Contains(insertSettingsStringHere))
    {
        formula = @" public override void Init() { base.Init(); } " + formula;
    }

    StringBuilder settingsString = new StringBuilder();
    settingsString.Append("if(GetString(\"intern.Formula.TempUpdateVal\")!=\"" + testHash + "\"){");
    settingsString.Append("SetParameterBulk(\"");
    foreach (KeyValuePair<string, string> entry in ParameterDict.Current.SortedEntries)
    {
        bool isInCategorie = false;
        foreach (string testCategorie in formulaSettingCategories)
        {
            if (entry.Key.StartsWith(testCategorie))
            {
                isInCategorie = true;
                break;
            }
        }

        if (isInCategorie)
        {
            if (!ParameterDict.IsAdditionalInfo(entry.Key))
            {
                settingsString.Append("<Entry Key='" + entry.Key + "' Value='" + entry.Value + "' />");
            }
        }
    }

    // Pin this formula to testHash.
    settingsString.Append("<Entry Key='intern.Formula.TempUpdateVal' Value='" + testHash + "' />");
    settingsString.Append("\");");
    settingsString.Append("}");

    formula = formula.Replace(insertSettingsStringHere, insertSettingsStringHere + settingsString.ToString());

    StringBuilder retVal = new StringBuilder();
    retVal.Append(CompressFormula(formula));
    return retVal.ToString();
}
public FastSEResNet(string architecture, string norm_layer = "BatchNorm", FuncArgs kwargs = null, string prefix = "", ParameterDict @params = null) : base(prefix, @params) { throw new NotImplementedException(); }
public DarknetV3(int[] layers, int[] channels, int classes = 1000, string norm_layer = "BatchNorm",
    FuncArgs norm_kwargs = null, string prefix = null, ParameterDict @params = null)
    : base(prefix, @params)
{
    Debug.Assert(layers.Length == channels.Length - 1,
        $"channels.Length should equal layers.Length + 1, given {channels.Length} vs {layers.Length}");

    this.features = new HybridSequential();
    // First 3x3 conv.
    this.features.Add(Conv2d(channels[0], 3, 1, 1, norm_layer: norm_layer, norm_kwargs: norm_kwargs));
    for (int i = 0; i < layers.Length; i++)
    {
        int nlayer = layers[i];
        // channels[0] is consumed by the stem conv above; stage i uses channels[i + 1].
        int channel = channels[i + 1];
        Debug.Assert(channel % 2 == 0, $"channel {channel} cannot be divided by 2");

        // Add a downsample conv with stride = 2.
        this.features.Add(Conv2d(channel, 3, 1, 2, norm_layer: norm_layer, norm_kwargs: norm_kwargs));

        // Add nlayer basic blocks.
        foreach (var _ in Enumerable.Range(0, nlayer))
        {
            this.features.Add(new DarknetBasicBlockV3(channel / 2, norm_layer: norm_layer, norm_kwargs: norm_kwargs));
        }
    }

    // Output classifier.
    this.output = new Dense(classes);

    RegisterChild(features);
    RegisterChild(output);
}
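// A hedged usage sketch (not part of the source above): the variable name is illustrative.
// These layers/channels values are the common darknet53 configuration; channels has one more
// entry than layers, matching the assert above.
var darknet53 = new DarknetV3(
    layers: new[] { 1, 2, 8, 8, 4 },
    channels: new[] { 32, 64, 128, 256, 512, 1024 },
    classes: 1000);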
public L2Loss(float? weight = 1, int? batch_axis = 0, string prefix = null, ParameterDict @params = null)
    : base(weight, batch_axis, prefix, @params)
{
}
public MaskRCNN(HybridBlock features, HybridBlock top_features, string[] classes, int mask_channels = 256, int rcnn_max_dets = 1000, int rpn_test_pre_nms = 6000, int rpn_test_post_nms = 1000, int target_roi_scale = 1, int num_fcn_convs = 0, string norm_layer = "", FuncArgs norm_kwargs = null, string prefix = null, ParameterDict @params = null) : base(features: features, top_features: top_features, classes: classes, rpn_test_pre_nms: rpn_test_pre_nms, rpn_test_post_nms: rpn_test_post_nms, additional_output: true, prefix: prefix, @params: @params) { throw new NotImplementedException(); }
public SparseEmbedding(int input_dim, int output_dim, DType dtype = null, string weight_initializer = null, string prefix = "", ParameterDict @params = null) : base(prefix, @params) { throw new NotImplementedException(); }
public IDAUp(int out_channels, int in_channels, float[] up_f, bool use_dcnv2 = false, string norm_layer = "BatchNorm", FuncArgs norm_kwargs = null, string prefix = "", ParameterDict @params = null) : base(prefix, @params) { throw new NotImplementedException(); }
public GoogLeNet(int classes = 1000, string norm_layer = "BatchNorm", float dropout_ratio = 0.4f, bool aux_logits = false, FuncArgs norm_kwargs = null, bool partial_bn = false, bool pretrained_base = true, Context ctx = null, string prefix = null, ParameterDict @params = null) : base(prefix, @params) { throw new NotImplementedException(); }
public SoftmaxCELoss(int axis = -1, bool sparse_label = true, bool from_logits = false, float? weight = null,
    int? batch_axis = 0, string prefix = null, ParameterDict @params = null)
    : base(axis, sparse_label, from_logits, weight, batch_axis, prefix, @params)
{
}
public SoftmaxCrossEntropyLoss(int axis = -1, bool sparse_label = true, bool from_logits = false,
    float? weight = null, int? batch_axis = 0, string prefix = null, ParameterDict @params = null)
    : base(weight, batch_axis, prefix, @params)
{
    _axis = axis;
    _sparse_label = sparse_label;
    _from_logits = from_logits;
}
public Tree(int levels, HybridBlock block, int in_channels, int out_channels, int stride = 1, bool level_root = false, int root_dim = 0, int root_kernel_size = 1, int dilation = 1, bool root_residual = false, string norm_layer = "BatchNorm", FuncArgs norm_kwargs = null, string prefix = null, ParameterDict @params = null) : base(prefix, @params) { throw new NotImplementedException(); }