/// <summary>
/// Runs the ReLU layer test on a fixed 6x7x8 random input.
/// </summary>
/// <param name="slope">Negative slope (leaky ReLU coefficient); 0 for plain ReLU.</param>
/// <param name="usePackingLayout">Whether to enable the packed memory layout option.</param>
/// <returns>0 on success; non-zero from the native test harness on failure.</returns>
private static int TestReLU(float slope, bool usePackingLayout)
{
    using var a = TestUtil.TestUtil.RandomMat(6, 7, 8);

    using var pd = new ParamDict();
    pd.Set(0, slope); // slope

    // ReLU takes no weights; a single empty Mat is passed as a placeholder.
    using var tmp = new Mat();
    var weights = new[] { tmp };
    using var vector = new StdVector<Mat>(weights);
    using var mb = new ModelBinFromMatArray(vector);

    using var opt = new Option
    {
        NumThreads = 1,
        UseVulkanCompute = true,
        UseFP16Packed = false,
        UseFP16Storage = false,
        UseFP16Arithmetic = false,
        UseInt8Storage = false,
        UseInt8Arithmetic = false,
        UsePackingLayout = usePackingLayout
    };

    var ret = TestUtil.TestUtil.TestLayer<ReLU>("ReLU", pd, mb, opt, a);
    if (ret != 0)
    {
        Console.Error.WriteLine($"test_relu failed slope={slope} use_packing_layout={usePackingLayout}");
    }

    // `weights` is a non-null local, so the original null-conditional call was dead code.
    // NOTE(review): tmp is also disposed by its `using` declaration above, so it is
    // disposed twice — presumably Mat.Dispose is idempotent (SafeHandle-style); confirm.
    weights.DisposeElement();
    return ret;
}
/// <summary>
/// Runs the int8 Deconvolution layer test on a random w x h x c input.
/// </summary>
/// <param name="w">Input width.</param>
/// <param name="h">Input height.</param>
/// <param name="c">Input channels.</param>
/// <param name="outch">Number of output channels.</param>
/// <param name="kernel">Kernel size (square).</param>
/// <param name="dilation">Dilation.</param>
/// <param name="stride">Stride.</param>
/// <param name="pad">Padding.</param>
/// <param name="bias">Non-zero to include a bias term.</param>
/// <returns>0 on success; non-zero from the native test harness on failure.</returns>
private static int TestDeconvolutionInt8(int w, int h, int c, int outch, int kernel, int dilation, int stride, int pad, int bias)
{
    using var a = TestUtil.TestUtil.RandomMat(w, h, c);

    using var pd = new ParamDict();
    pd.Set(0, outch);    // num_output
    pd.Set(1, kernel);   // kernel_w
    pd.Set(2, dilation); // dilation_w
    pd.Set(3, stride);   // stride_w
    pd.Set(4, pad);      // pad_w
    pd.Set(5, bias);     // bias_term
    pd.Set(6, outch * c * kernel * kernel); // weight_data_size
    pd.Set(8, 1);        // int8_scale_term

    // Weight layout: weight data, [bias], then the int8 scale mats
    // (presumably per-channel weight scales plus the bottom-blob scale — verify
    // against the native Deconvolution int8 loader).
    var weights = new Mat[bias > 0 ? 4 : 3];
    weights[0] = TestUtil.TestUtil.RandomMat(outch * c * kernel * kernel);
    if (bias > 0)
    {
        weights[1] = TestUtil.TestUtil.RandomMat(outch);
        weights[2] = TestUtil.TestUtil.RandomMat(outch);
        weights[3] = TestUtil.TestUtil.RandomMat(1);
    }
    else
    {
        weights[1] = TestUtil.TestUtil.RandomMat(outch);
        weights[2] = TestUtil.TestUtil.RandomMat(1);
    }
    using var vector = new StdVector<Mat>(weights);
    using var mb = new ModelBinFromMatArray(vector);

    using var opt = new Option
    {
        NumThreads = 1,
        UseVulkanCompute = false,
        UseInt8Inference = true,
        UseFP16Packed = false,
        UseFP16Storage = false,
        UseFP16Arithmetic = false,
        UseInt8Storage = false,
        UseInt8Arithmetic = false,
        UsePackingLayout = false
    };

    var ret = TestUtil.TestUtil.TestLayer<Deconvolution>("Deconvolution", pd, mb, opt, a);
    if (ret != 0)
    {
        // Fixed copy-paste bug: the message previously said "test_convolution failed".
        Console.Error.WriteLine($"test_deconvolution failed w={w} h={h} c={c} outch={outch} kernel={kernel} dilation={dilation} stride={stride} pad={pad} bias={bias}");
    }

    // Dispose the RandomMat weights — this was missing here, unlike the sibling
    // tests in this file, leaking the native Mat allocations.
    weights.DisposeElement();
    return ret;
}
/// <summary>
/// Runs the ConvolutionDepthWise layer test on a random w x h x c input.
/// </summary>
/// <param name="w">Input width.</param>
/// <param name="h">Input height.</param>
/// <param name="c">Input channels.</param>
/// <param name="outch">Number of output channels.</param>
/// <param name="kernel">Kernel size (square).</param>
/// <param name="dilation">Dilation.</param>
/// <param name="stride">Stride.</param>
/// <param name="pad">Padding.</param>
/// <param name="bias">Non-zero to include a bias term.</param>
/// <param name="group">Number of convolution groups.</param>
/// <param name="usePackingLayout">Whether to enable the packed memory layout option.</param>
/// <returns>0 on success; non-zero from the native test harness on failure.</returns>
private static int TestConvolutionDepthWise(int w, int h, int c, int outch, int kernel, int dilation, int stride, int pad, int bias, int group, bool usePackingLayout)
{
    using var a = TestUtil.TestUtil.RandomMat(w, h, c);

    using var pd = new ParamDict();
    pd.Set(0, outch);    // num_output
    pd.Set(1, kernel);   // kernel_w
    pd.Set(2, dilation); // dilation_w
    pd.Set(3, stride);   // stride_w
    pd.Set(4, pad);      // pad_w
    pd.Set(5, bias);     // bias_term
    pd.Set(6, outch / group * c / group * kernel * kernel * group); // weight_data_size
    pd.Set(7, group);    // group

    var weights = new Mat[bias > 0 ? 2 : 1];
    weights[0] = TestUtil.TestUtil.RandomMat(outch / group * c / group * kernel * kernel * group);
    // Fixed bug: the bias Mat was assigned unconditionally, but the array only has
    // room for it when bias > 0 — the bias == 0 path threw IndexOutOfRangeException.
    if (bias > 0)
    {
        weights[1] = TestUtil.TestUtil.RandomMat(outch);
    }
    using var vector = new StdVector<Mat>(weights);
    using var mb = new ModelBinFromMatArray(vector);

    using var opt = new Option
    {
        NumThreads = 1,
        UseVulkanCompute = true,
        UseInt8Inference = false,
        UseFP16Packed = false,
        UseFP16Storage = false,
        UseFP16Arithmetic = false,
        UseInt8Storage = false,
        UseInt8Arithmetic = false,
        UsePackingLayout = usePackingLayout
    };

    var ret = TestUtil.TestUtil.TestLayer<ConvolutionDepthWise>("ConvolutionDepthWise", pd, mb, opt, a);
    if (ret != 0)
    {
        Console.Error.WriteLine($"test_convolutiondepthwise failed w={w} h={h} c={c} outch={outch} kernel={kernel} dilation={dilation} stride={stride} pad={pad} bias={bias} group={group} use_packing_layout={usePackingLayout}");
    }

    // `weights` is a non-null local, so the null-conditional call was dead code.
    weights.DisposeElement();
    return ret;
}