private static int TestReLU(float slope, bool usePackingLayout)
{
    using var a = TestUtil.TestUtil.RandomMat(6, 7, 8);

    using var pd = new ParamDict();
    pd.Set(0, slope); // slope

    using var tmp = new Mat();
    var weights = new[] { tmp };
    using var vector = new StdVector<Mat>(weights);
    using var mb = new ModelBinFromMatArray(vector);

    using var opt = new Option
    {
        NumThreads = 1,
        UseVulkanCompute = true,
        UseFP16Packed = false,
        UseFP16Storage = false,
        UseFP16Arithmetic = false,
        UseInt8Storage = false,
        UseInt8Arithmetic = false,
        UsePackingLayout = usePackingLayout
    };

    var ret = TestUtil.TestUtil.TestLayer<ReLU>("ReLU", pd, mb, opt, a);
    if (ret != 0)
    {
        Console.Error.WriteLine($"test_relu failed slope={slope} use_packing_layout={usePackingLayout}");
    }

    weights?.DisposeElement();

    return(ret);
}
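// A minimal driver sketch, not part of the original source: it shows how TestReLU above could be
// swept over a few slope values with the packing layout both off and on, accumulating the return
// codes. The method name TestReLU_0 and the chosen slope values are illustrative assumptions.
private static int TestReLU_0()
{
    var ret = 0;
    foreach (var slope in new[] { 0.0f, 0.1f, 0.42f })
    {
        ret |= TestReLU(slope, false);
        ret |= TestReLU(slope, true);
    }

    return ret;
}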
public static string ReturnParameter(HttpSessionStateBase Session, string ParameterName)
{
    if (ParameterName.StartsWith("Order."))
    {
        return(getOrderParameters(Session, ParameterName));
    }

    if (ParameterName.StartsWith("Tax."))
    {
        return(getTaxParameters(Session, ParameterName));
    }

    var returnDict = ReturnParameters(Session);
    if (returnDict == null)
    {
        return(null);
    }

    var ParamDict = (Dictionary<string, string>)returnDict;

    string keyvalue;
    if (ParamDict.TryGetValue(ParameterName, out keyvalue))
    {
        return(keyvalue);
    }
    else
    {
        return("");
    }
}
/// <summary>
/// Adds a query parameter and returns its generated name; names are numbered by incrementing LastParamIndex.
/// </summary>
/// <param name="v">The parameter value.</param>
/// <returns>The generated parameter name.</returns>
private string AddParam(object v)
{
    // Increment _lastParamIndex right after use so the next parameter gets a distinct name.
    var paramName = $"Sql{_lastParamIndex++}Param";
    ParamDict.Add(paramName, v);
    return(paramName);
}
/// <summary>
/// Adds a query parameter and returns its generated name; names are numbered by incrementing LastParamIndex.
/// </summary>
/// <param name="v">The parameter value.</param>
/// <returns>The generated parameter name.</returns>
private string AddParam(object v)
{
    // Increment _lastParamIndex right after use so the next parameter gets a distinct name.
    var paramName = "SQL_P_" + _lastParamIndex++;
    ParamDict.Add(paramName, v);
    return(paramName);
}
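// Hypothetical usage sketch, not taken from the original source: it illustrates how the name returned
// by AddParam would typically be spliced into the SQL text kept in SqlText (see the Clear method later
// in this listing), while the actual value stays in ParamDict. The AndWhere helper and the "@"
// placeholder prefix are assumptions that depend on the database provider.
private void AndWhere(string column, object value)
{
    var paramName = AddParam(value);
    SqlText.Append($" AND {column} = @{paramName}");
}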
public static ParamFile GetFile(string name)
{
    if (!string.IsNullOrEmpty(name) && ParamDict.TryGetValue(name, out var file))
    {
        return(file);
    }

    return(null);
}
public async Task<ActionResult> DeleteConfirmed(int id)
{
    ParamDict paramDict = await db.ParamDict.FindAsync(id);
    db.ParamDict.Remove(paramDict);
    await db.SaveChangesAsync();
    return(RedirectToAction("Index"));
}
public static void AddFile(ParamNode node)
{
    string name = Path.GetFileNameWithoutExtension(node.AbsolutePath);
    if (!ParamDict.ContainsKey(name))
    {
        ParamDict.Add(name, node.Param);
        HandleParam(name, node.Param);
    }
}
public async Task<ActionResult> Edit([Bind(Include = "Id,ParamName,SubItemName")] ParamDict paramDict)
{
    if (ModelState.IsValid)
    {
        db.Entry(paramDict).State = EntityState.Modified;
        await db.SaveChangesAsync();
        return(RedirectToAction("Index"));
    }

    return(View(paramDict));
}
private static int TestDeconvolutionInt8(int w, int h, int c, int outch, int kernel, int dilation, int stride, int pad, int bias)
{
    using var a = TestUtil.TestUtil.RandomMat(w, h, c);

    using var pd = new ParamDict();
    pd.Set(0, outch);    // num_output
    pd.Set(1, kernel);   // kernel_w
    pd.Set(2, dilation); // dilation_w
    pd.Set(3, stride);   // stride_w
    pd.Set(4, pad);      // pad_w
    pd.Set(5, bias);     // bias_term
    pd.Set(6, outch * c * kernel * kernel); // weight_data_size
    pd.Set(8, 1);        // int8_scale_term

    var weights = new Mat[bias > 0 ? 4 : 3];
    weights[0] = TestUtil.TestUtil.RandomMat(outch * c * kernel * kernel);
    if (bias > 0)
    {
        weights[1] = TestUtil.TestUtil.RandomMat(outch); // bias data
        weights[2] = TestUtil.TestUtil.RandomMat(outch); // weight int8 scales
        weights[3] = TestUtil.TestUtil.RandomMat(1);     // bottom blob int8 scale
    }
    else
    {
        weights[1] = TestUtil.TestUtil.RandomMat(outch); // weight int8 scales
        weights[2] = TestUtil.TestUtil.RandomMat(1);     // bottom blob int8 scale
    }

    using var vector = new StdVector<Mat>(weights);
    using var mb = new ModelBinFromMatArray(vector);

    using var opt = new Option
    {
        NumThreads = 1,
        UseVulkanCompute = false,
        UseInt8Inference = true,
        UseFP16Packed = false,
        UseFP16Storage = false,
        UseFP16Arithmetic = false,
        UseInt8Storage = false,
        UseInt8Arithmetic = false,
        UsePackingLayout = false
    };

    var ret = TestUtil.TestUtil.TestLayer<Deconvolution>("Deconvolution", pd, mb, opt, a);
    if (ret != 0)
    {
        Console.Error.WriteLine($"test_deconvolution failed w={w} h={h} c={c} outch={outch} kernel={kernel} dilation={dilation} stride={stride} pad={pad} bias={bias}");
    }

    weights?.DisposeElement();

    return(ret);
}
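// A minimal driver sketch, assumed rather than taken from the original source: it exercises
// TestDeconvolutionInt8 above over a few kernel/stride combinations with and without bias on a
// small input. The shapes and the method name TestDeconvolutionInt8_0 are illustrative assumptions.
private static int TestDeconvolutionInt8_0()
{
    var ret = 0;
    foreach (var kernel in new[] { 1, 3 })
    foreach (var stride in new[] { 1, 2 })
    foreach (var bias in new[] { 0, 1 })
    {
        ret |= TestDeconvolutionInt8(9, 7, 4, 8, kernel, 1, stride, 0, bias);
    }

    return ret;
}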
public async Task<ActionResult> Create([Bind(Include = "Id,ParamName,SubItemName")] ParamDict paramDict)
{
    if (ModelState.IsValid)
    {
        db.ParamDict.Add(paramDict);
        await db.SaveChangesAsync();
        return(RedirectToAction("Index"));
    }

    return(View(paramDict));
}
public async Task<ActionResult> Details(int? id)
{
    if (id == null)
    {
        return(new HttpStatusCodeResult(HttpStatusCode.BadRequest));
    }

    ParamDict paramDict = await db.ParamDict.FindAsync(id);
    if (paramDict == null)
    {
        return(HttpNotFound());
    }

    return(View(paramDict));
}
static void Main(string[] args)
{
    var _pd = new ParamDict();
    _pd["00.01.02"] = 1;
    _pd["k1"] = 1;
    _pd["k2"] = 2;
    _pd["k3"] = 3;
    _pd["k4"] = 4;
    _pd["k5"] = 5;

    var _element = _pd["00.01"];

    RuntimeCompiler();
}
private static int DetectShuffleNetV2(NcnnDotNet.OpenCV.Mat bgr, List<float> clsScores)
{
    using (var shuffleNetV2 = new Net())
    {
        if (Ncnn.IsSupportVulkan)
        {
            shuffleNetV2.Opt.UseVulkanCompute = true;
        }

        // https://github.com/miaow1988/ShuffleNet_V2_pytorch_caffe
        // models can be downloaded from https://github.com/miaow1988/ShuffleNet_V2_pytorch_caffe/releases
        shuffleNetV2.LoadParam("shufflenet_v2_x0.5.param");
        shuffleNetV2.LoadModel("shufflenet_v2_x0.5.bin");

        using var @in = Mat.FromPixelsResize(bgr.Data, PixelType.Bgr, bgr.Cols, bgr.Rows, 224, 224);

        var normVals = new[] { 1 / 255.0f, 1 / 255.0f, 1 / 255.0f };
        @in.SubstractMeanNormalize(null, normVals);

        using var ex = shuffleNetV2.CreateExtractor();
        ex.Input("data", @in);

        using var @out = new Mat();
        ex.Extract("fc", @out);

        // manually call softmax on the fc output
        // convert result into probability
        // skip if your model already has softmax operation
        {
            using var softmax = Ncnn.CreateLayer("Softmax");
            using var pd = new ParamDict();
            softmax.LoadParam(pd);
            softmax.ForwardInplace(@out, shuffleNetV2.Opt);
        }

        using var @out2 = @out.Reshape(@out.W * @out.H * @out.C);
        clsScores.Capacity = @out2.W;
        for (var j = 0; j < @out2.W; j++)
        {
            clsScores.Add(@out2[j]);
        }
    }

    return(0);
}
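// A small helper sketch, not part of the original source, showing how the clsScores list filled by
// DetectShuffleNetV2 above could be turned into a top-k printout. Only standard .NET is used
// (System.Linq); the method name PrintTopK and the default of three results are assumptions.
private static void PrintTopK(IReadOnlyList<float> clsScores, int topK = 3)
{
    var ranked = clsScores.Select((score, index) => (score, index))
                          .OrderByDescending(p => p.score)
                          .Take(topK);

    foreach (var (score, index) in ranked)
    {
        Console.WriteLine($"{index} = {score}");
    }
}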
private static int TestConvolutionDepthWise(int w, int h, int c, int outch, int kernel, int dilation, int stride, int pad, int bias, int group, bool usePackingLayout)
{
    using var a = TestUtil.TestUtil.RandomMat(w, h, c);

    using var pd = new ParamDict();
    pd.Set(0, outch);    // num_output
    pd.Set(1, kernel);   // kernel_w
    pd.Set(2, dilation); // dilation_w
    pd.Set(3, stride);   // stride_w
    pd.Set(4, pad);      // pad_w
    pd.Set(5, bias);     // bias_term
    pd.Set(6, outch / group * c / group * kernel * kernel * group); // weight_data_size
    pd.Set(7, group);    // group

    var weights = new Mat[bias > 0 ? 2 : 1];
    weights[0] = TestUtil.TestUtil.RandomMat(outch / group * c / group * kernel * kernel * group);
    if (bias > 0)
    {
        // only allocate the bias mat when bias_term is set; otherwise the array has a single slot
        weights[1] = TestUtil.TestUtil.RandomMat(outch);
    }

    using var vector = new StdVector<Mat>(weights);
    using var mb = new ModelBinFromMatArray(vector);

    using var opt = new Option
    {
        NumThreads = 1,
        UseVulkanCompute = true,
        UseInt8Inference = false,
        UseFP16Packed = false,
        UseFP16Storage = false,
        UseFP16Arithmetic = false,
        UseInt8Storage = false,
        UseInt8Arithmetic = false,
        UsePackingLayout = usePackingLayout
    };

    var ret = TestUtil.TestUtil.TestLayer<ConvolutionDepthWise>("ConvolutionDepthWise", pd, mb, opt, a);
    if (ret != 0)
    {
        Console.Error.WriteLine($"test_convolutiondepthwise failed w={w} h={h} c={c} outch={outch} kernel={kernel} dilation={dilation} stride={stride} pad={pad} bias={bias} group={group} use_packing_layout={usePackingLayout}");
    }

    weights?.DisposeElement();

    return(ret);
}
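// A minimal driver sketch, assumed rather than taken from the original source, for
// TestConvolutionDepthWise above: it covers the pure depthwise case (group == channels) and a
// grouped case without bias, with the packing layout both off and on. The shapes, group counts
// and the method name are illustrative assumptions.
private static int TestConvolutionDepthWise_0()
{
    var ret = 0;
    foreach (var usePackingLayout in new[] { false, true })
    {
        ret |= TestConvolutionDepthWise(15, 15, 8, 8, 3, 1, 1, 1, 1, 8, usePackingLayout);  // depthwise, group == c, with bias
        ret |= TestConvolutionDepthWise(15, 15, 8, 16, 3, 1, 1, 1, 0, 2, usePackingLayout); // grouped, no bias
    }

    return ret;
}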
public IFuture Curry(ParamDict paramDict) { return new OpenDocumentSensitivePrioritizedFuture(this.prioritizer, this.prototype.Curry(paramDict), this.openDocumentFuture); }
public IFuture Curry(ParamDict paramDict) { return(this); }
public IFuture Curry(ParamDict paramDict) { return(new TransparencyFuture(transparencyOptions, antialiasedPrototype.Curry(paramDict), exactColorPrototype.Curry(paramDict))); }
public static int TestLayer<T>(int typeIndex, ParamDict pd, ModelBin mb, Option opt, Mat a, float epsilon = 0.001f)
    where T : Layer, new()
{
    using (var op = Ncnn.CreateLayer<T>(typeIndex))
    {
        if (!op.SupportPacking)
        {
            opt.UsePackingLayout = false;
        }

        VulkanDevice vkDev = null;
        VkWeightBufferAllocator gWeightVkAllocator = null;
        VkWeightStagingBufferAllocator gWeightStagingVkAllocator = null;
        VkBlobBufferAllocator gBlobVkAllocator = null;
        VkStagingBufferAllocator gStagingVkAllocator = null;
        if (Ncnn.IsSupportVulkan)
        {
            vkDev = Ncnn.GetGpuDevice();
            gWeightVkAllocator = new VkWeightBufferAllocator(vkDev);
            gWeightStagingVkAllocator = new VkWeightStagingBufferAllocator(vkDev);
            gBlobVkAllocator = new VkBlobBufferAllocator(vkDev);
            gStagingVkAllocator = new VkStagingBufferAllocator(vkDev);

            opt.BlobVkAllocator = gBlobVkAllocator;
            opt.WorkspaceVkAllocator = gBlobVkAllocator;
            opt.StagingVkAllocator = gStagingVkAllocator;

            if (!vkDev.Info.SupportFP16Storage)
            {
                opt.UseFP16Storage = false;
            }
            if (!vkDev.Info.SupportFP16Packed)
            {
                opt.UseFP16Packed = false;
            }

            op.VkDev = vkDev;
        }

        op.LoadParam(pd);
        op.LoadModel(mb);
        op.CreatePipeline(opt);

        if (Ncnn.IsSupportVulkan)
        {
            if (opt.UseVulkanCompute)
            {
                using var cmd = new VkTransfer(vkDev)
                {
                    WeightVkAllocator = gWeightVkAllocator,
                    StagingVkAllocator = gWeightStagingVkAllocator
                };
                op.UploadModel(cmd, opt);
                cmd.SubmitAndWait();
                gWeightStagingVkAllocator?.Clear();
            }
        }

        // b: reference result from the typed Forward on cpu
        using var b = new Mat();
        ((T)op).Forward(a, b, opt);

        // c: cpu result through the generic Forward path, packed/unpacked as requested
        var c = new Mat();
        {
            Mat a4;
            if (opt.UsePackingLayout)
            {
                a4 = new Mat();
                Ncnn.ConvertPacking(a, a4, 4, opt);
            }
            else
            {
                a4 = a;
            }

            var c4 = new Mat();
            op.Forward(a4, c4, opt);

            if (opt.UsePackingLayout)
            {
                Ncnn.ConvertPacking(c4, c, 1, opt);
                c4.Dispose();
            }
            else
            {
                c?.Dispose();
                c = c4;
            }
        }

        // d: gpu result via Vulkan, when available
        Mat d = null;
        try
        {
            if (Ncnn.IsSupportVulkan)
            {
                d = new Mat();
                if (opt.UseVulkanCompute)
                {
                    using var a4 = new Mat();
                    Mat a4_fp16 = null;
                    try
                    {
                        // pack
                        Ncnn.ConvertPacking(a, a4, 4, opt);

                        // fp16
                        if (opt.UseFP16Storage || a4.ElemPack == 4 && opt.UseFP16Packed)
                        {
                            a4_fp16 = new Mat();
                            Ncnn.CastFloat32ToFloat16(a4, a4_fp16, opt);
                        }
                        else
                        {
                            a4_fp16 = a4;
                        }

                        // upload
                        using var a4_fp16_gpu = new VkMat();
                        a4_fp16_gpu.CreateLike(a4_fp16, gBlobVkAllocator, gStagingVkAllocator);
                        a4_fp16_gpu.PrepareStagingBuffer();
                        a4_fp16_gpu.Upload(a4_fp16);

                        // forward
                        using var cmd = new VkCompute(vkDev);
                        cmd.RecordUpload(a4_fp16_gpu);
                        using var d4_fp16_gpu = new VkMat();
                        op.Forward(a4_fp16_gpu, d4_fp16_gpu, cmd, opt);
                        d4_fp16_gpu.PrepareStagingBuffer();
                        cmd.RecordDownload(d4_fp16_gpu);
                        cmd.SubmitAndWait();

                        // download
                        using var d4_fp16 = new Mat();
                        d4_fp16.CreateLike(d4_fp16_gpu);
                        d4_fp16_gpu.Download(d4_fp16);

                        // fp32
                        Mat d4 = null;
                        try
                        {
                            if (opt.UseFP16Storage || d4_fp16.ElemPack == 4 && opt.UseFP16Packed)
                            {
                                d4 = new Mat();
                                Ncnn.CastFloat16ToFloat32(d4_fp16, d4, opt);
                            }
                            else
                            {
                                d4 = d4_fp16;
                            }

                            // unpack
                            Ncnn.ConvertPacking(d4, d, 1, opt);
                        }
                        finally
                        {
                            d4?.Dispose();
                        }
                    }
                    finally
                    {
                        a4_fp16?.Dispose();
                    }
                }
            }

            op.DestroyPipeline(opt);

            // Must dispose here!!
            op.Dispose();

            if (Ncnn.IsSupportVulkan)
            {
                gBlobVkAllocator.Clear();
                gStagingVkAllocator.Clear();
                gWeightVkAllocator.Clear();

                gBlobVkAllocator?.Dispose();
                gStagingVkAllocator?.Dispose();
                gWeightVkAllocator?.Dispose();
                gWeightStagingVkAllocator?.Dispose();
            }

            if (CompareMat(b, c, epsilon) != 0)
            {
                Console.Error.WriteLine("test_layer failed cpu");
                return(-1);
            }

            if (Ncnn.IsSupportVulkan)
            {
                if (opt.UseVulkanCompute && CompareMat(b, d, epsilon) != 0)
                {
                    Console.Error.WriteLine("test_layer failed gpu");
                    return(-1);
                }
            }
        }
        finally
        {
            c?.Dispose();
            d?.Dispose();
        }
    }

    return(0);
}
public IFuture Curry(ParamDict paramDict) { return(new OpenDocumentSensitivePrioritizedFuture(prioritizer, prototype.Curry(paramDict), openDocumentFuture)); }
public IFuture Curry(ParamDict paramDict) { return new MemCacheFuture(this.cache, this.prototype.Curry(paramDict)); }
public IFuture Curry(ParamDict paramDict) { return this; }
/// <summary>
/// Clears the SQL text and the accumulated query parameters.
/// </summary>
public void Clear()
{
    ParamDict.Clear();
    SqlText.Remove(0, SqlText.Length);
}
public IFuture Curry(ParamDict paramDict) { return new TransparencyFuture(this.transparencyOptions, this.antialiasedPrototype.Curry(paramDict), this.exactColorPrototype.Curry(paramDict)); }
public IFuture Curry(ParamDict paramDict) { return Asynchronizer.MakeFuture(this.scheduler, this.innerPrototype.Curry(paramDict)); }
public static void Unload() { ParamDict.Clear(); }
public IFuture Curry(ParamDict paramDict) { return(new DiskCacheFuture(cache, prototype.Curry(paramDict))); }
public IFuture Curry(ParamDict paramDict)
{
    IFuture[] futureParams = Array.ConvertAll<IFuturePrototype, IFuture>(this.prototypeParams, (IFuturePrototype p) => p.Curry(paramDict));
    return new ApplyFuture(this.verb, futureParams);
}
public IFuture Curry(ParamDict paramDict) { return new ConstantFuture(paramDict[this.name]); }
public IFuture Curry(ParamDict paramDict) { return(new ConstantFuture(paramDict[name])); }
public static int TestLayer<T>(string layerType, ParamDict pd, ModelBin mb, Option opt, Mat a, float epsilon = 0.001f)
    where T : Layer, new()
{
    return(TestLayer<T>(Ncnn.LayerToIndex(layerType), pd, mb, opt, a, epsilon));
}