/// <summary>Computes a unique hash for an input spec by hashing every file it resolves to.</summary>
/// <param name="context">The context providing configuration and per-file hashing.</param>
/// <param name="inputSpec">The input spec whose files are hashed.</param>
/// <returns>A JSON string mapping each source-relative file path to its content hash.</returns>
private static string GetInputSpecHash(IWebGreaseContext context, InputSpec inputSpec)
{
    var sourceDirectory = context.Configuration.SourceDirectory;

    // Key by source-relative path so the hash is stable across machines/checkouts.
    var hashByRelativePath = inputSpec
        .GetFiles(sourceDirectory)
        .ToDictionary(file => file.MakeRelativeToDirectory(sourceDirectory), context.GetFileHash);

    return hashByRelativePath.ToJson();
}
/// <summary>Creates an N-dimensional convolution layer.</summary>
/// <param name="rank">Spatial rank of the convolution (1, 2, or 3).</param>
/// <param name="filters">Number of output filters.</param>
/// <param name="kernel_size">Kernel size per spatial dimension.</param>
/// <param name="strides">Stride per spatial dimension; null for the default.</param>
/// <param name="padding">Padding mode, e.g. "valid" or "same".</param>
/// <param name="data_format">Data format; null to use the framework default.</param>
/// <param name="dilation_rate">Dilation rate per spatial dimension; null for the default.</param>
/// <param name="activation">Optional activation applied to the output.</param>
/// <param name="use_bias">Whether a bias term is added.</param>
/// <param name="kernel_initializer">Initializer for the kernel weights.</param>
/// <param name="bias_initializer">Initializer for the bias weights.</param>
/// <param name="trainable">Whether the layer's variables are trainable.</param>
/// <param name="name">Optional layer name.</param>
public Conv(int rank, int filters, int[] kernel_size, int[] strides = null, string padding = "valid",
    string data_format = null, int[] dilation_rate = null, IActivation activation = null, bool use_bias = true,
    IInitializer kernel_initializer = null, IInitializer bias_initializer = null, bool trainable = true,
    string name = null) : base(trainable: trainable, name: name)
{
    // Geometry of the convolution.
    this.rank = rank;
    this.filters = filters;
    this.kernel_size = kernel_size;
    this.strides = strides;
    this.padding = padding;
    this.data_format = data_format;
    this.dilation_rate = dilation_rate;

    // Activation / weight configuration.
    this.activation = activation;
    this.use_bias = use_bias;
    this.kernel_initializer = kernel_initializer;
    this.bias_initializer = bias_initializer;

    // Inputs carry batch + channel dimensions on top of the spatial rank.
    input_spec = new InputSpec(ndim: rank + 2);
}
/// <summary>Regex-replace callback that expands a matched wgInclude() statement into file contents.</summary>
/// <param name="match">The regex match containing the "fileOrPath" and "searchPattern" groups.</param>
/// <param name="workingFolder">The working folder used to resolve relative include paths.</param>
/// <param name="cacheSection">The cache section that records the include as a source dependency.</param>
/// <param name="minimalOutput">True to skip the per-file "/* WGINCLUDE ... */" header comments.</param>
/// <returns>The concatenated contents of all matched files.</returns>
private static string ReplaceInputs(Match match, string workingFolder, ICacheSection cacheSection, bool minimalOutput)
{
    var fileOrPath = Path.Combine(workingFolder, match.Groups["fileOrPath"].Value.Trim());
    var inputSpec = new InputSpec { IsOptional = true, Path = fileOrPath };
    if (Directory.Exists(fileOrPath))
    {
        // A directory include may carry a search pattern (e.g. "*.js").
        inputSpec.SearchPattern = match.Groups["searchPattern"].Value.Trim();
    }

    cacheSection.AddSourceDependency(inputSpec);

    // Fix: build with a StringBuilder instead of repeated string concatenation,
    // which allocated a new string per file (O(n^2) for many includes).
    var result = new System.Text.StringBuilder();
    foreach (var file in inputSpec.GetFiles())
    {
        if (!minimalOutput)
        {
            result.Append("/* WGINCLUDE: {0} */\r\n".InvariantFormat(file));
        }

        result.Append(File.ReadAllText(file)).Append("\r\n");
    }

    return result.ToString();
}
/// <summary>Verifies that bins flagged as skipped are excluded from per-bin stats reporting.</summary>
public void SkippedBinsAreIgnoredInStats()
{
    var outDir = Path.GetRandomFileName().ToDirectoryInfo();

    // Alternate skipped/reported bins; only the two non-skipped bins should be reported on.
    var alternatingBins = ImmutableList<(uint size, bool skip)>.Empty
        .Add((1, true))
        .Add((100000, false))
        .Add((200000, true))
        .Add((300000, false));

    var specsByType = InputSpec.GenerateDefaultInputSpecs(true)
        .Select(spec => InputSpec.Create(spec.VariantType, alternatingBins, 1, 1,
            spec.ExcludedFilters, spec.IncludedFilters, spec.IncludedRegions))
        .ToDictionary(spec => spec.VariantType, spec => spec);

    var settings = WittyerSettings.Create(outDir, TinyTruth, TinyQuery,
        ImmutableList<ISamplePair>.Empty, EvaluationMode.SimpleCounting, specsByType);

    var (_, query, truth) = MainLauncher.GenerateResults(settings).EnumerateSuccesses().First();
    var results = MainLauncher.GenerateSampleMetrics(truth, query, false, specsByType);

    Assert.Equal(1U, results.OverallStats[StatsType.Event].QueryStats.TrueCount);
    Assert.Equal(1U, results.OverallStats[StatsType.Event].QueryStats.FalseCount);
    Assert.Equal(0.5, results.EventLevelRecallOverall
        .First(typeRecallTuple => typeRecallTuple.type == WittyerType.CopyNumberGain).recall);

    var reportedBinCount = results.EventLevelRecallPerBin.First().perBinRecall.Count();
    Assert.Equal(2, reportedBinCount);
}
/// <summary>Creates a 2D upsampling layer from the given args.</summary>
/// <param name="args">Layer arguments (size and data format).</param>
public UpSampling2D(UpSampling2DArgs args) : base(args)
{
    this.args = args;

    // Normalize user-supplied configuration to canonical forms.
    data_format = conv_utils.normalize_data_format(args.DataFormat);
    size = conv_utils.normalize_tuple(args.Size, 2, "size");

    // 2D upsampling expects NHWC/NCHW style 4-D inputs.
    inputSpec = new InputSpec(ndim: 4);
}
/// <summary>Creates a layer that flattens its input, preserving the batch dimension.</summary>
/// <param name="args">Layer arguments (data format).</param>
public Flatten(FlattenArgs args) : base(args)
{
    args.DataFormat = conv_utils.normalize_data_format(args.DataFormat);
    input_spec = new InputSpec(min_ndim: 1);

    // Remember whether channels come first so call() can transpose appropriately.
    _channels_first = args.DataFormat == "channels_first";
}
/// <summary>Creates a 2D zero-padding layer.</summary>
/// <param name="args">Layer arguments (padding amounts).</param>
/// <param name="data_format">Optional data format; null uses the framework default.</param>
public ZeroPadding2D(ZeroPadding2DArgs args, string data_format = null) : base(args)
{
    this.data_format = conv_utils.normalize_data_format(data_format);
    this.padding = args.Padding;

    // Padding operates on 4-D (batch, spatial, spatial, channel) inputs.
    this.input_spec = new InputSpec(ndim: 4);
}
/// <summary>Registers an input spec as a source dependency of this cache section.</summary>
/// <param name="inputSpec">The input spec to record.</param>
public void AddSourceDependency(InputSpec inputSpec)
{
    this.isUnsaved = true;

    // The serialized spec doubles as both the lock key and the dictionary key.
    var key = inputSpec.ToJson(true);
    Safe.UniqueKeyLock(
        key,
        Safe.MaxLockTimeout,
        () =>
            {
                if (this.sourceDependencies.ContainsKey(key))
                {
                    // Already tracked; nothing to do.
                    return;
                }

                // Store a detached copy so later mutation of the caller's
                // instance cannot corrupt the cached dependency.
                var specCopy = new InputSpec
                {
                    IsOptional = inputSpec.IsOptional,
                    Path = inputSpec.Path,
                    SearchOption = inputSpec.SearchOption,
                    SearchPattern = inputSpec.SearchPattern
                };

                this.sourceDependencies.Add(key, CacheSourceDependency.Create(this.context, specCopy));
            });
}
/// <summary>Builds the dense layer's weights once the input shape is known.</summary>
/// <param name="input_shape">Shape of the incoming tensor; its last dimension sizes the kernel.</param>
protected override void build(TensorShape input_shape)
{
    var last_dim = input_shape.dims.Last();

    // Pin the last axis so later calls must match the built feature size.
    var axes = new Dictionary<int, int> { [-1] = last_dim };
    input_spec = new InputSpec(min_ndim: 2, axes: axes);

    kernel = add_weight(
        "kernel",
        shape: new int[] { last_dim, units },
        initializer: kernel_initializer,
        dtype: _dtype,
        trainable: true);

    if (use_bias)
    {
        bias = add_weight(
            "bias",
            shape: new int[] { units },
            initializer: bias_initializer,
            dtype: _dtype,
            trainable: true);
    }

    built = true;
}
/// <summary>
/// Parameterized test: a truth/query DUP-DEL pair should (or should not) overlap-match
/// under cross-type evaluation, depending on <paramref name="isTp"/>.
/// </summary>
/// <param name="truthVar">Raw VCF line for the truth variant.</param>
/// <param name="queryVar">Raw VCF line for the query variant.</param>
/// <param name="type">Wittyer variant type name to parse (e.g. a DUP/DEL type).</param>
/// <param name="isTp">Expected outcome: true if the pair should be a true positive.</param>
public void OverlapWorks_DupDel([NotNull] string truthVar, [NotNull] string queryVar, string type, bool isTp)
{
    const string sampleName = "blah";
    var vcfVariantParserSettings = VcfVariantParserSettings.Create(new List <string> { sampleName });
    // Parse the truth VCF line; GetOrThrow surfaces parse failures as test failures.
    var baseVariant = VcfVariant.TryParse(truthVar, vcfVariantParserSettings).GetOrThrow();
    const bool isCrossTypeOn = true;
    var wittyerType = WittyerType.Parse(type);
    // Cross-type specs: note the negated flag (specs are generated for "non-cross-type" = false).
    var inputSpecs = InputSpec.GenerateCustomInputSpecs(!isCrossTypeOn, new[] { wittyerType }, percentDistance: PercentDistance).ToDictionary(s => s.VariantType, s => s);
    var bndSet = new Dictionary <IGeneralBnd, IVcfVariant>();
    var errorList = new List <string>();
    // Build the truth variant (isTruth: true), then reuse baseVariant for the query parse below.
    var truthV = (IMutableWittyerSimpleVariant)WittyerVcfReader.CreateVariant(baseVariant, baseVariant.Samples.First().Value, true, sampleName, inputSpecs, bndSet, errorList, isCrossTypeOn);
    baseVariant = VcfVariant.TryParse(queryVar, vcfVariantParserSettings).GetOrThrow();
    var queryV = (IMutableWittyerVariant)WittyerVcfReader.CreateVariant(baseVariant, baseVariant.Samples.First().Value, false, sampleName, inputSpecs, bndSet, errorList, isCrossTypeOn);
    // Insert the truth variant into an interval forest, then overlap the query against it.
    var tree = TruthForest.Create(sampleName, VcfHeader.CreateBuilder(VcfVersion.FourPointOne).Build());
    tree.AddTarget(truthV);
    OverlappingUtils.DoOverlapping(tree.VariantTrees, queryV, OverlappingUtils.IsVariantAlleleMatch, isCrossTypeOn, true);
    // Finalize with pessimistic defaults (FP/FN); matching flips them to TP.
    queryV.Finalize(WitDecision.FalsePositive, EvaluationMode.CrossTypeAndSimpleCounting, null);
    truthV.Finalize(WitDecision.FalseNegative, EvaluationMode.CrossTypeAndSimpleCounting, null);
    Assert.Equal(isTp ? WitDecision.TruePositive : WitDecision.FalsePositive, queryV.Sample.Wit);
    Assert.Equal(isTp ? WitDecision.TruePositive : WitDecision.FalseNegative, truthV.Sample.Wit);
}
/// <summary>Builds the dense layer's weights once the input shape is known.</summary>
/// <param name="input_shape">Shape of the incoming tensor; its last dimension sizes the kernel.</param>
protected override void build(TensorShape input_shape)
{
    var last_dim = input_shape.dims.Last();

    // Pin the last axis so later calls must match the built feature size.
    var axes = new Dictionary<int, int> { [-1] = last_dim };
    inputSpec = new InputSpec(min_ndim: 2, axes: axes);

    kernel = add_weight(
        "kernel",
        shape: new TensorShape(last_dim, args.Units),
        initializer: args.KernelInitializer,
        dtype: DType,
        trainable: true);

    if (args.UseBias)
    {
        bias = add_weight(
            "bias",
            shape: new TensorShape(args.Units),
            initializer: args.BiasInitializer,
            dtype: DType,
            trainable: true);
    }

    built = true;
}
/// <summary>Creates a 1D pooling layer.</summary>
/// <param name="args">Layer arguments (padding and data format are normalized in place).</param>
public Pooling1D(Pooling1DArgs args) : base(args)
{
    this.args = args;

    // Canonicalize user-supplied configuration.
    args.Padding = conv_utils.normalize_padding(args.Padding);
    args.DataFormat = conv_utils.normalize_data_format(args.DataFormat);

    // 1D pooling operates on 3-D (batch, steps, channels) inputs.
    input_spec = new InputSpec(ndim: 3);
}
/// <summary>Creates a 2D pooling layer.</summary>
/// <param name="args">Layer arguments; pool size, strides, padding and data format are normalized in place.</param>
public Pooling2D(Pooling2DArgs args) : base(args)
{
    this.args = args;

    // Canonicalize configuration; strides default to the pool size when unset.
    args.PoolSize = conv_utils.normalize_tuple(args.PoolSize, 2, "pool_size");
    args.Strides = conv_utils.normalize_tuple(args.Strides ?? args.PoolSize, 2, "strides");
    args.Padding = conv_utils.normalize_padding(args.Padding);
    args.DataFormat = conv_utils.normalize_data_format(args.DataFormat);

    // 2D pooling operates on 4-D inputs.
    input_spec = new InputSpec(ndim: 4);
}
/// <summary>Creates an N-dimensional convolution layer from the given args.</summary>
/// <param name="args">Layer arguments; geometry fields are normalized in place.</param>
public Conv(ConvArgs args) : base(args)
{
    this.args = args;

    // Canonicalize all geometry configuration against the layer's rank.
    args.KernelSize = conv_utils.normalize_tuple(args.KernelSize.dims, args.Rank, "kernel_size");
    args.Strides = conv_utils.normalize_tuple(args.Strides.dims, args.Rank, "strides");
    args.Padding = conv_utils.normalize_padding(args.Padding);
    args.DataFormat = conv_utils.normalize_data_format(args.DataFormat);
    args.DilationRate = conv_utils.normalize_tuple(args.DilationRate.dims, args.Rank, "dilation_rate");

    // Inputs carry batch + channel dimensions on top of the spatial rank.
    inputSpec = new InputSpec(ndim: rank + 2);
    _tf_data_format = conv_utils.convert_data_format(data_format, rank + 2);
}
/// <summary>Parses every descendant of <paramref name="element"/> into an input spec and adds
/// those with a non-empty path to the list.</summary>
/// <param name="inputSpecs">The list receiving the parsed input specs.</param>
/// <param name="sourceDirectory">The source directory used to resolve relative paths.</param>
/// <param name="element">The XML element whose descendants describe input specs.</param>
internal static void AddInputSpecs(this IList <InputSpec> inputSpecs, string sourceDirectory, XElement element)
{
    foreach (var inputElement in element.Descendants())
    {
        var parsed = new InputSpec(inputElement, sourceDirectory);
        if (string.IsNullOrWhiteSpace(parsed.Path))
        {
            // Skip entries that did not yield a usable path.
            continue;
        }

        inputSpecs.Add(parsed);
    }
}
/// <summary>Builds the convolution's weights and the cached convolution op from the input shape.</summary>
/// <param name="inputs">The input tensors whose shape determines the channel count.</param>
protected override void build(Tensors inputs)
{
    Shape input_shape = inputs.shape;

    // Resolve the channel axis (first or last) into an actual dimension size.
    int channel_axis = data_format == "channels_first" ? 1 : -1;
    var input_channel = channel_axis < 0
        ? input_shape.dims[input_shape.ndim + channel_axis]
        : input_shape.dims[channel_axis];

    // Kernel shape = spatial dims + (in-channels per group, out-filters).
    Shape kernel_shape = kernel_size.dims.concat(new long[] { input_channel / args.Groups, filters });
    kernel = add_weight(name: "kernel",
        shape: kernel_shape,
        initializer: kernel_initializer,
        regularizer: kernel_regularizer,
        trainable: true,
        dtype: DType);

    if (use_bias)
    {
        bias = add_weight(name: "bias",
            shape: new int[] { filters },
            initializer: bias_initializer,
            trainable: true,
            dtype: DType);
    }

    // Pin the channel dimension so subsequent calls must match.
    var axes = new Dictionary<int, int> { { -1, (int)input_channel } };
    inputSpec = new InputSpec(min_ndim: rank + 2, axes: axes);

    // "causal" padding is implemented as VALID plus explicit pre-padding elsewhere.
    var tf_padding = padding == "causal" ? "VALID" : padding.ToUpper();
    var tf_op_name = GetType().Name;

    _convolution_op = nn_ops.convolution_internal(tf_padding,
        strides,
        dilation_rate,
        rank,
        data_format: _tf_data_format,
        name: tf_op_name);

    built = true;
}
/// <summary>
/// Initialize the basic LSTM cell.
/// </summary>
/// <param name="num_units">The number of units in the LSTM cell.</param>
/// <param name="forget_bias">Bias added to forget gates to reduce forgetting early in training.</param>
/// <param name="state_is_tuple">Whether the state is returned as a (c, h) tuple.</param>
/// <param name="activation">Inner activation; defaults to tanh when null.</param>
/// <param name="reuse">Whether to reuse variables in an existing scope.</param>
/// <param name="name">Optional cell name.</param>
/// <param name="dtype">Data type of the cell's variables.</param>
public BasicLstmCell(int num_units, float forget_bias = 1.0f, bool state_is_tuple = true,
    IActivation activation = null, bool?reuse = null, string name = null,
    TF_DataType dtype = TF_DataType.DtInvalid) : base(_reuse: reuse, name: name, dtype: dtype)
{
    // Inputs must be 2-dimensional (batch, features).
    input_spec = new InputSpec(ndim: 2);

    _num_units = num_units;
    _forget_bias = forget_bias;
    _state_is_tuple = state_is_tuple;

    // Fall back to tanh when no activation was supplied.
    _activation = activation;
    if (_activation == null)
    {
        _activation = tf.nn.tanh();
    }
}
#pragma warning restore CS0108 // Member hides inherited member; missing new keyword
/// <summary>Creates a 2D pooling layer around the supplied pooling function.</summary>
/// <param name="pool_function">The pooling operation (max, average, ...).</param>
/// <param name="pool_size">Pool window size, normalized to two dimensions.</param>
/// <param name="strides">Stride, normalized to two dimensions.</param>
/// <param name="padding">Padding mode, e.g. "valid" or "same".</param>
/// <param name="data_format">Data format; null uses the framework default.</param>
/// <param name="name">Optional layer name.</param>
public Pooling2D(IPoolFunction pool_function, int[] pool_size, int[] strides,
    string padding = "valid", string data_format = null, string name = null) : base(name: name)
{
    this.pool_function = pool_function;

    // Canonicalize user-supplied configuration.
    this.pool_size = conv_utils.normalize_tuple(pool_size, 2, "pool_size");
    this.strides = conv_utils.normalize_tuple(strides, 2, "strides");
    this.padding = conv_utils.normalize_padding(padding);
    this.data_format = conv_utils.normalize_data_format(data_format);

    // 2D pooling operates on 4-D inputs.
    this.input_spec = new InputSpec(ndim: 4);
}
/// <summary>Builds the 2D convolution's weights and the cached convolution op.</summary>
/// <param name="input_shape">Shape of the incoming tensor, used to size the kernel.</param>
protected override void build(TensorShape input_shape)
{
    // Resolve the channel axis (first or last) into an actual dimension size.
    int channel_axis = data_format == "channels_first" ? 1 : -1;
    int input_dim = channel_axis < 0
        ? input_shape.dims[input_shape.ndim + channel_axis]
        : input_shape.dims[channel_axis];

    var kernel_shape = new int[] { kernel_size[0], kernel_size[1], input_dim, filters };
    kernel = add_weight(name: "kernel",
        shape: kernel_shape,
        initializer: kernel_initializer,
        trainable: true,
        dtype: _dtype);

    if (use_bias)
    {
        bias = add_weight(name: "bias",
            shape: new int[] { filters },
            initializer: bias_initializer,
            trainable: true,
            dtype: _dtype);
    }

    // Pin the channel dimension so subsequent calls must match.
    var axes = new Dictionary<int, int> { { -1, input_dim } };
    input_spec = new InputSpec(ndim: rank + 2, axes: axes);

    // "causal" padding is implemented as valid plus explicit pre-padding elsewhere.
    var op_padding = padding == "causal" ? "valid" : padding;

    var df = conv_utils.convert_data_format(data_format, rank + 2);
    _convolution_op = nn_ops.Convolution(input_shape,
        kernel.shape,
        op_padding.ToUpper(),
        strides,
        dilation_rate,
        data_format: df);

    built = true;
}
/// <summary>Smoke test: a reference site with an undetermined GT parses into a Wittyer variant.</summary>
public static void WittyerVariantReaderWorks()
{
    var vcfSettings = VcfVariantParserSettings.Create(
        ImmutableList.Create("proband", "father"), GenomeAssembly.Grch37);
    var ref1 = VcfVariant.TryParse(RefSiteUndeterminedGt, vcfSettings).GetOrThrowDebug();

    // Only a CopyNumberReference spec is needed for this site type.
    var specs = new Dictionary<WittyerType, InputSpec>
    {
        {
            WittyerType.CopyNumberReference,
            InputSpec.GenerateCustomInputSpecs(false,
                new[] { WittyerType.CopyNumberReference }, percentDistance: 0.05).First()
        }
    };

    WittyerVcfReader.CreateVariant(ref1, ref1.Samples.First().Value, false, "proband",
        specs, new Dictionary<IGeneralBnd, IVcfVariant>(), new List<string>(), true);
}
/// <summary>Verifies cross-type evaluation produces the expected number of true events.</summary>
public void CrossType_Works()
{
    var outDir = Path.GetRandomFileName().ToDirectoryInfo();

    // Default specs with a widened basepair distance of 10000.
    var specsByType = InputSpec.GenerateDefaultInputSpecs(false)
        .Select(spec => InputSpec.Create(spec.VariantType, spec.BinSizes, 10000, spec.PercentDistance,
            spec.ExcludedFilters, spec.IncludedFilters, spec.IncludedRegions))
        .ToDictionary(spec => spec.VariantType, spec => spec);

    var settings = WittyerSettings.Create(outDir, TinyTruth, TinyQuery,
        ImmutableList<ISamplePair>.Empty, EvaluationMode.CrossTypeAndSimpleCounting, specsByType);

    var (_, query, truth) = MainLauncher.GenerateResults(settings).EnumerateSuccesses().First();
    var results = MainLauncher.GenerateSampleMetrics(truth, query, false, specsByType);

    Assert.Equal(4U, results.OverallStats[StatsType.Event].QueryStats.TrueCount);
}
/// <summary>Resolves an input spec to the files it denotes: a single file, the contents of a
/// directory (filtered by search pattern/option), or nothing.</summary>
/// <param name="input">The input spec to resolve.</param>
/// <param name="rootPath">Optional root used to resolve the spec's relative path.</param>
/// <param name="log">Optional log manager that receives progress messages.</param>
/// <param name="throwWhenMissingAndNotOptional">When true, a missing non-optional path throws.</param>
/// <returns>The resolved files, ordered case-insensitively for directory matches.</returns>
public static IEnumerable <string> GetFiles(this InputSpec input, string rootPath = null, LogManager log = null, bool throwWhenMissingAndNotOptional = false)
{
    var files = new List<string>();
    var path = Path.Combine(rootPath ?? String.Empty, input.Path);

    if (File.Exists(path))
    {
        // The spec points at a single existing file.
        log?.Information("- {0}".InvariantFormat(path));
        files.Add(path);
        return files;
    }

    if (Directory.Exists(path))
    {
        // The spec points at a directory: enumerate with its pattern and options.
        log?.Information("Folder: {0}, Pattern: {1}, Options: {2}".InvariantFormat(path, input.SearchPattern, input.SearchOption));

        var pattern = String.IsNullOrWhiteSpace(input.SearchPattern) ? "*.*" : input.SearchPattern;
        files.AddRange(
            Directory.EnumerateFiles(path, pattern, input.SearchOption)
                .OrderBy(name => name, StringComparer.OrdinalIgnoreCase));

        if (log != null)
        {
            foreach (var file in files)
            {
                log.Information("- {0}".InvariantFormat(file));
            }
        }

        return files;
    }

    if (!input.IsOptional && throwWhenMissingAndNotOptional)
    {
        // The path does not exist and the spec requires it.
        throw new FileNotFoundException(
            "Could not find the file for non option input spec: Path:{0}, SearchPattern:{1}, Options:{2}".InvariantFormat(
                path, input.SearchPattern, input.SearchOption),
            path);
    }

    return files;
}
/// <summary>Builds the normalization layer's gamma/beta weights from the input shape.</summary>
/// <param name="inputs">The input tensors whose shape fixes the normalized axes.</param>
protected override void build(Tensors inputs)
{
    Shape input_shape = inputs.shape;
    var ndims = input_shape.ndim;

    // Convert any negative axis indices to their positive equivalents.
    foreach (var (idx, x) in enumerate(axis))
    {
        if (x < 0)
        {
            axis[idx] = ndims + x;
        }
    }

    // Record the concrete dimension of every normalized axis.
    var axis_to_dim = new Dictionary<int, int>();
    foreach (var x in axis)
    {
        axis_to_dim[x] = (int)input_shape[x];
    }

    inputSpec = new InputSpec(ndim: ndims, axes: axis_to_dim);
    var param_dtype = DType == TF_DataType.DtInvalid ? TF_DataType.TF_FLOAT : DType;
    var param_shape = inputSpec.AllAxisDim;

    if (scale)
    {
        gamma = add_weight("gamma", param_shape, dtype: param_dtype,
            initializer: gamma_initializer, trainable: true);
    }

    if (center)
    {
        beta = add_weight("beta", param_shape, dtype: param_dtype,
            initializer: beta_initializer, trainable: true);
    }

    _fused = _fused_can_be_used(ndims);
    built = true;
}
/// <summary>Verifies that base-level stats count bases of false-positive events clipped to the BED region.</summary>
public void Bed_Counts_Bases_Of_FP_Events()
{
    var outDir = Path.GetRandomFileName().ToDirectoryInfo();

    // Default specs restricted to the BED region, with a 10000bp distance.
    var specsByType = InputSpec.GenerateDefaultInputSpecs(false)
        .Select(spec => InputSpec.Create(spec.VariantType, spec.BinSizes, 10000, spec.PercentDistance,
            spec.ExcludedFilters, spec.IncludedFilters, IncludeBedFile.CreateFromBedFile(Bed)))
        .ToDictionary(spec => spec.VariantType, spec => spec);

    var settings = WittyerSettings.Create(outDir, Truth, Query,
        ImmutableList<ISamplePair>.Empty, EvaluationMode.CrossTypeAndSimpleCounting, specsByType);

    var (_, query, truth) = MainLauncher.GenerateResults(settings).EnumerateSuccesses().First();
    var results = MainLauncher.GenerateSampleMetrics(truth, query, false, specsByType);

    // Expected bases = end of bed - start of query + 1 = 149835000 - 145395620 + 1 = 4439381.
    MultiAssert.Equal(4439381U, results.OverallStats[StatsType.Base].QueryStats.TrueCount);
    MultiAssert.Equal(4439381U, results.OverallStats[StatsType.Base].TruthStats.TrueCount);

    // No event-level true positives are expected.
    MultiAssert.Equal(0U, results.OverallStats[StatsType.Event].QueryStats.TrueCount);
    MultiAssert.Equal(0U, results.OverallStats[StatsType.Event].TruthStats.TrueCount);
    MultiAssert.AssertAll();
}
/// <summary>Verifies cross-type evaluation with a complex BED file yields the expected deletion base counts.</summary>
public void CrossType_ComplexBed_Works()
{
    var outDir = Path.GetRandomFileName().ToDirectoryInfo();

    // Specs come from the config file, restricted to the BED region; fall back to empty on null.
    var specsByType = InputSpec.CreateSpecsFromString(
            File.ReadAllText(Config.FullName),
            IncludeBedFile.CreateFromBedFile(Bed))
        ?.ToDictionary(spec => spec.VariantType, spec => spec)
        ?? new Dictionary<WittyerType, InputSpec>();

    var settings = WittyerSettings.Create(outDir, Truth, Query,
        ImmutableList<ISamplePair>.Empty, EvaluationMode.CrossTypeAndSimpleCounting, specsByType);

    var (_, query, truth) = MainLauncher.GenerateResults(settings).EnumerateSuccesses().First();
    var results = MainLauncher.GenerateSampleMetrics(truth, query, false, specsByType);

    var baseStats = results.DetailedStats[WittyerType.Deletion].OverallStats[StatsType.Base];
    MultiAssert.Equal(206678U, baseStats.QueryStats.TrueCount);
    MultiAssert.Equal(206678U, baseStats.TruthStats.TrueCount);
    MultiAssert.AssertAll();
}
/// <summary>Creates a basic (vanilla) RNN cell.</summary>
/// <param name="num_units">The number of units in the cell.</param>
/// <param name="activation">Activation function; defaults to tanh when null.</param>
/// <param name="reuse">Whether to reuse variables in an existing scope.</param>
/// <param name="name">Optional cell name.</param>
/// <param name="dtype">Data type of the cell's variables.</param>
public BasicRnnCell(int num_units, Func <Tensor, string, Tensor> activation = null,
    bool?reuse = null, string name = null, TF_DataType dtype = TF_DataType.DtInvalid)
    : base(_reuse: reuse, name: name, dtype: dtype)
{
    // Inputs must be 2-dimensional (batch, features).
    inputSpec = new InputSpec(ndim: 2);
    _num_units = num_units;

    // Fall back to tanh when no activation was supplied.
    if (activation != null)
    {
        _activation = activation;
    }
    else
    {
        _activation = math_ops.tanh;
    }
}
/// <summary>Verifies processing of a single fixed-size record spec with three fixed-size fields.</summary>
public void SingleFixedSizeRecordWithFixedSizeFields()
{
    // Arrange: one record type with three fixed-width fields.
    var spec = new InputSpec
    {
        Fields = new List<InputField>
        {
            new InputField { Name = "first_name", Size = 30 },
            new InputField { Name = "last_name", Size = 20 },
            new InputField { Name = "date_of_birth", Size = 8 },
        }
    };

    // Act.
    var results = sut.Process(spec).ToList();

    // Assert: one record output plus one extensions output.
    Assert.AreEqual(2, results.Count);
    Assert.AreEqual(Resources.Phase1Test1Record, results[0]);
    Assert.AreEqual(Resources.Phase1Test1Extensions, results[1]);
}
/// <summary>Verifies the shipped default config file deserializes into the same input specs
/// the command line generates by default.</summary>
public void UsingDefaultConfigFileProducesSameInputSpecsAsCommandLineDefaults()
{
    var defaultConfigFilePath = Path.Combine(Environment.CurrentDirectory, "Config-default.json");

    var fromCommandLine = InputSpec
        .GenerateDefaultInputSpecs(true, WittyerType.AllTypes.OrderBy(s => s.Name))
        .ToImmutableDictionary(x => x.VariantType, x => x);

    var fromConfigFile = JsonConvert
        .DeserializeObject<IEnumerable<InputSpec>>(File.ReadAllText(defaultConfigFilePath),
            InputSpecConverter.Create())
        .OrderBy(x => x.VariantType.Name)
        .ToImmutableDictionary(x => x.VariantType, x => x);

    // Compare the two maps entry-by-entry, collecting all mismatches before asserting.
    foreach (var (key, configSpec) in fromConfigFile)
    {
        if (!fromCommandLine.TryGetValue(key, out var commandLineSpec))
        {
            // Variant type present in config but missing from command-line defaults.
            MultiAssert.Equal(string.Empty, key.ToString());
        }

        if (!configSpec.Equals(commandLineSpec))
        {
            MultiAssert.Equal(configSpec, commandLineSpec);
        }
    }

    MultiAssert.AssertAll();
}
/// <summary>Creates a source dependency for an input spec, hashing the files it resolves to.</summary>
/// <param name="context">The context supplying configuration and hashing.</param>
/// <param name="inputSpec">The input spec; its Path is rewritten to be source-relative.</param>
/// <returns>The <see cref="CacheSourceDependency"/>.</returns>
internal static CacheSourceDependency Create(IWebGreaseContext context, InputSpec inputSpec)
{
    if (context == null)
    {
        throw new ArgumentNullException("context");
    }

    if (inputSpec == null)
    {
        throw new ArgumentNullException("inputSpec");
    }

    var csd = new CacheSourceDependency();
    if (Directory.Exists(inputSpec.Path))
    {
        // BUG FIX: the original called EnsureEndSeparator() and discarded the result;
        // strings are immutable, so the directory path was never actually normalized.
        inputSpec.Path = inputSpec.Path.EnsureEndSeparator();
    }

    csd.InputSpecHash = GetInputSpecHash(context, inputSpec);

    // Store the path relative to the source directory so the cache is relocatable.
    inputSpec.Path = inputSpec.Path.MakeRelativeToDirectory(context.Configuration.SourceDirectory);
    csd.InputSpec = inputSpec;

    return csd;
}
/// <summary>Builds the batch-normalization layer's weights and moving statistics from the input shape.</summary>
/// <param name="inputs">The input tensors whose shape fixes the normalized axes.</param>
protected override void build(Tensors inputs)
{
    TensorShape input_shape = inputs.shape;
    var ndims = input_shape.ndim;
    // Convert any negative axis indices to their positive equivalents.
    foreach (var(idx, x) in enumerate(axis))
    {
        if (x < 0)
        {
            axis[idx] = ndims + x;
        }
    }

    // Fused batch norm is only attempted for 4-D inputs, and only for the
    // standard channel-first ([1]) or channel-last ([3]) axis layouts.
    fused = ndims == 4;
    if (fused)
    {
        if (Enumerable.SequenceEqual(axis, new int[] { 1 }))
        {
            _data_format = "NCHW";
        }
        else if (Enumerable.SequenceEqual(axis, new int[] { 3 }))
        {
            _data_format = "NHWC";
        }
        else
        {
            throw new ValueError($"Unsupported axis, fused batch norm only supports axis == [1] or axis == [3]");
        }
    }

    // Record the concrete dimension of every normalized axis.
    var axis_to_dim = new Dictionary <int, int>();
    foreach (var x in axis)
    {
        axis_to_dim[x] = input_shape[x];
    }

    inputSpec = new InputSpec(ndim: ndims, axes: axis_to_dim);
    var param_dtype = DType == TF_DataType.DtInvalid ? TF_DataType.TF_FLOAT : DType;
    var param_shape = inputSpec.AllAxisDim;

    // gamma (scale) and beta (center); the no-gamma/no-beta paths are not implemented yet.
    if (scale)
    {
        gamma = add_weight("gamma", param_shape, dtype: param_dtype, initializer: gamma_initializer, trainable: true);
    }
    else
    {
        throw new NotImplementedException("add_weight gamma");
    }

    if (center)
    {
        beta = add_weight("beta", param_shape, dtype: param_dtype, initializer: beta_initializer, trainable: true);
    }
    else
    {
        throw new NotImplementedException("add_weight beta");
    }

    // Non-trainable moving statistics, updated via assignment during training.
    moving_mean = add_weight("moving_mean", param_shape, dtype: param_dtype, initializer: moving_mean_initializer, synchronization: VariableSynchronization.OnRead, aggregation: VariableAggregation.Mean, trainable: false);
    moving_variance = add_weight("moving_variance", shape: param_shape, dtype: param_dtype, initializer: moving_variance_initializer, synchronization: VariableSynchronization.OnRead, aggregation: VariableAggregation.Mean, trainable: false);

    if (renorm)
    {
        throw new NotImplementedException("build when renorm is true");
    }

    built = true;
}