// Verifies that the WyHash64 suite produces approximately uniform values after
// contraction into the range [0, maxVal): a random sample of bucket counts must
// deviate from the mean bucket count by no more than the 10% tolerance.
// NOTE(review): buckets has length maxVal while the claim below admits
// x == maxVal (lteq); if Contract can ever yield maxVal the increment
// ++buckets[x] would throw IndexOutOfRangeException. Presumably Contract maps
// into the half-open range [0, maxVal), in which case the claim could be
// tightened to a strict inequality — confirm against Contract's definition.
public void TestSuite() { var dim = N12.Rep; var trials = Pow2.T16; var maxVal = (ulong)Pow2.T07; var maxVec = BlockVector.Alloc(dim, maxVal); var buckets = new ulong[maxVal]; var suite = Rng.WyHash64Suite(dim); for (var i = 0; i < trials; i++) { var vector = suite.Next <ulong>(); var contracted = vector.Contract(maxVec); for (var j = 0; j < dim; j++) { var x = contracted[j]; Claim.lteq(x, maxVal); ++buckets[x]; } } var indices = Random.Array <int>(10, leftopen(0, buckets.Length)); var bucketSample = buckets.Values(indices); var avg = (double)gmath.avg <ulong>(bucketSample); var deltas = bucketSample.Map(c => (math.abs(1 - ((double)c / avg)).Round(4))); var tolerance = .1; var intolerant = deltas.Where(x => x > tolerance).Count(); Claim.eq(intolerant, 0); }
void contract64f_bench()
{
    // Compares the throughput of the static contract routine against the
    // extension-method baseline over the same random 64-bit sample.
    var bound = 250000ul;
    var count = SampleSize;
    var samples = Random.Array<ulong>(count);
    var clock = stopwatch();

    clock.Start();
    for (var ix = 0; ix < count; ix++)
        contract(samples[ix], bound);
    clock.Stop();
    var staticTime = OpTime.Define(count, snapshot(clock), "contract-f64");

    clock.Reset();
    clock.Start();
    for (var ix = 0; ix < count; ix++)
        samples[ix].Contract(bound);
    clock.Stop();
    var baselineTime = OpTime.Define(count, snapshot(clock), "contract-baseline");

    TracePerf((staticTime, baselineTime));
}
public void sar_64i()
{
    // Checks gbits.sar against the language's built-in signed right shift.
    // NOTE(review): the offset domain is the closed interval [0, 64]; C#
    // masks 64-bit shift counts to the low 6 bits, so a shift by 64 behaves
    // as a shift by 0 — gbits.sar must mirror that masking for this to hold.
    var values = Random.Array<long>(SampleSize);
    var shifts = Random.Array<int>(SampleSize, closed(0, (int)SizeOf<long>.BitSize));
    iter(SampleSize, i =>
    {
        var expect = values[i] >> shifts[i];
        Claim.eq(expect, gbits.sar(values[i], shifts[i]));
    });
}
public void sar_8i()
{
    // Checks gbits.sar on sbyte operands against the built-in shift; the
    // sbyte operand is promoted to int for the shift, so the result is
    // narrowed back before comparison, exactly as the reference does.
    var values = Random.Array<sbyte>(SampleSize);
    var shifts = Random.Array<int>(SampleSize, closed(0, (int)SizeOf<sbyte>.BitSize));
    iter(SampleSize, i =>
    {
        var expect = (sbyte)(values[i] >> shifts[i]);
        Claim.eq(expect, gbits.sar(values[i], shifts[i]));
    });
}
public void sal_32i()
{
    // Checks gbits.sal against the language's built-in left shift.
    // NOTE(review): the offset domain includes 32; C# masks 32-bit shift
    // counts to the low 5 bits, so gbits.sal must apply the same masking.
    var values = Random.Array<int>(SampleSize);
    var shifts = Random.Array<int>(SampleSize, closed(0, (int)SizeOf<int>.BitSize));
    iter(SampleSize, i =>
    {
        var expect = values[i] << shifts[i];
        Claim.eq(expect, gbits.sal(values[i], shifts[i]));
    });
}
void run_mean_bench()
{
    // Benchmarks the MKL streaming mean against a direct Avg computation
    // over the same converted sample.
    // NOTE(review): neither timer is explicitly started or stopped around
    // its loop — this assumes stopwatch() yields an already-running
    // stopwatch; confirm against the helper's definition.
    var cycles = Pow2.T12;
    var samples = Pow2.T14;
    var src = Random.Array<long>(samples, closed(-2000L, 2000L)).Convert<double>();
    var ds = Dataset.Load(src);
    var dst = 0.0;
    var last = 0.0;

    var mklTimer = stopwatch();
    for (var cycle = 0; cycle < cycles; cycle++)
        last = ds.Mean(ref dst);
    var mklTime = OpTime.Define(cycles * samples, snapshot(mklTimer), "mkl-ssmean");

    var directTimer = stopwatch();
    for (var cycle = 0; cycle < cycles; cycle++)
        last = src.Avg();
    var directTime = OpTime.Define(cycles * samples, snapshot(directTimer), "direct");

    Collect((mklTime, directTime));
}
void xmm_create <T>() where T : unmanaged { var cellcount = XMM.CellCount <T>(); Claim.eq(cellcount, Vec128 <T> .Length); var xmmWidth = XMM.BitWidth; Claim.eq(xmmWidth, Vec128 <T> .ByteCount * 8); var cellwidth = XMM.CellWidth <T>(); Claim.eq(cellwidth, bitsize <T>()); var celldata = Random.Array <T>(cellcount); var xmm = XMM.FromCells(celldata); for (var i = 0; i < cellcount; i++) { Claim.eq(xmm.Cell <T>(i), celldata[i]); } var bitmap = XMM.BitMap <T>(); Claim.eq(bitmap.CellCount, cellcount); Claim.eq(bitmap.CellWidth, cellwidth); check_bitmap(bitmap); for (int i = 0, k = 0; i < bitmap.CellCount; i++) { ref readonly var index = ref bitmap.Cell(i * bitmap.CellWidth);
public void mean()
{
    // The MKL mean over the converted sample must agree, after integer
    // truncation, with the directly computed average of the source values.
    var sample = Random.Array<long>(Pow2.T14, closed(-2000L, 2000L));
    var expect = sample.Avg();
    var actual = (long)Dataset.Load(sample.Convert<double>()).Mean()[0];
    Claim.eq(expect, actual);
}
public void sumvals()
{
    // The MKL dataset sum must match the directly computed sum; both sides
    // are rounded to 4 places to absorb floating-point accumulation noise.
    var sample = Random.Array<double>(Pow2.T14);
    var expect = sample.Sum().Round(4);
    var actual = Dataset.Load(sample).Sum()[0].Round(4);
    Claim.eq(expect, actual);
}
protected K[] RandArray <K>(bool nonzero = false)
    where K : struct
{
    // Draws a sample of the configured size from the type's sample domain,
    // optionally excluding zero-valued elements.
    var config = Config.Get<K>();
    if (nonzero)
        return Random.NonZeroArray<K>(config.SampleSize, SampleDomain<K>());
    else
        return Random.Array<K>(config.SampleSize, SampleDomain<K>());
}
public void minval()
{
    // NOTE(review): despite the name, this checks that the sample MAXIMUM is
    // nonzero; with a sample domain of [350, 1000] zeroes cannot occur, so
    // the reported zero count should always be 0.
    var samplesize = Pow2.T14;
    var domain = Interval.closed(350.0, 1000.0);
    var sample = Random.Array<double>(samplesize, domain);
    var observedMax = Observations.Load(sample).Max()[0];
    NumericClaims.neq(observedMax, 0.0);
    var zeroCount = sample.Count(x => x == 0);
    Notify($"Found {zeroCount} zeroes");
}
public void minval()
{
    // NOTE(review): despite the name, this checks that the dataset MAXIMUM is
    // nonzero; with a sample domain of [350, 1000] zeroes cannot occur, so
    // the traced zero count should always be 0.
    var samplesize = Pow2.T14;
    var domain = closed(350.0, 1000.0);
    var sample = Random.Array<double>(samplesize, domain);
    var observedMax = Dataset.Load(sample).Max()[0];
    Claim.neq(observedMax, 0.0);
    var zeroCount = sample.Count(x => x == 0);
    Trace($"Found {zeroCount} zeroes");
}
public void pack_split_u16()
{
    // Round-trip check: packing two bytes into a u16 and splitting the
    // result must recover both original operands.
    var count = Pow2.T08;
    var lower = Random.Array<byte>(count);
    var upper = Random.Array<byte>(count);
    for (var i = 0; i < count; i++)
    {
        var packed = Bits.pack(lower[i], upper[i]);
        var (part0, part1) = Bits.split(packed);
        Claim.eq(part0, lower[i]);
        Claim.eq(part1, upper[i]);
    }
}
protected void VerifyOp <T>(AsmBinOp <T> asmop, Func <T, T, T> refop, int?n = null)
    where T : unmanaged
{
    // Verifies a binary asm operator against a reference implementation over
    // random operand pairs; the claim fails if any pair disagrees.
    // Fix: bracketed with TypeCaseStart/TypeCaseEnd for consistency with the
    // unary VerifyOp overload, which already reports its type case.
    TypeCaseStart<T>();
    var lhs = Random.Array<T>(n ?? SampleSize);
    var rhs = Random.Array<T>(n ?? SampleSize);
    // Pair each operand tuple with its expected and actual results.
    var comparisons =
        from args in lhs.Zip(rhs)
        let expect = refop(args.First, args.Second)
        let actual = asmop(args.First, args.Second)
        select (expect, actual, success: expect.Equals(actual));
    // Collect only the disagreements; an empty set means the op verified.
    var failures =
        from r in comparisons
        where !r.success
        select (r.expect, r.actual);
    Claim.eq(0, failures.Count());
    TypeCaseEnd<T>();
}
protected void VerifyOp <T>(AsmUnaryOp <T> asmop, Func <T, T> refop, int?n = null)
    where T : unmanaged
{
    // Verifies a unary asm operator against a reference implementation over
    // a random sample; the claim fails if any input disagrees.
    TypeCaseStart<T>();
    var sample = Random.Array<T>(n ?? SampleSize);
    // Pair each input with its expected and actual results.
    var comparisons =
        from arg in sample
        let expect = refop(arg)
        let actual = asmop(arg)
        select (expect, actual, success: expect.Equals(actual));
    // Collect only the disagreements; an empty set means the op verified.
    var failures =
        from r in comparisons
        where !r.success
        select (r.expect, r.actual);
    Claim.eq(0, failures.Count());
    TypeCaseEnd<T>();
}
public void radixSort()
{
    // After a radix sort, each observation vector must be nondecreasing.
    var obsCount = Pow2.T10;
    var dim = Pow2.T08;
    var domain = Interval.closed(-20f, 20f);
    var data = Random.Array<float>(dim * obsCount, domain);
    var sorted = Observations.Load(data, dim).RadixSort();
    for (var i = 0; i < obsCount; i++)
    {
        var v = sorted.Observation(i);
        // Adjacent-pair scan: v[j-1] <= v[j] for every interior index.
        for (var j = 1; j < dim; j++)
            ClaimNumeric.lteq(v[j - 1], v[j]);
    }
}
public void CreateMt2203Generators()
{
    // Creates a family of MT2203 generators and smoke-tests several sampling
    // routines against each stream.
    // Fix: the streams are now disposed in a finally block so that a failed
    // claim mid-loop cannot leak the native RNG stream handles (the original
    // only disposed them after all claims had passed).
    var gencount = Pow2.T08;
    var samplesize = Pow2.T16;
    var seeds = Random.Array<uint>(gencount);
    var streams = new MklRng[gencount];
    for (var i = 0; i < gencount; i++)
        streams[i] = rng.mt2203(seeds[i], i);

    try
    {
        var bufferF64 = new double[samplesize];
        var bufferU32 = new uint[samplesize];
        var bufferI32 = new int[samplesize];
        var ufRange = closed(1.0, 250.0);
        for (var i = 0; i < gencount; i++)
        {
            var stream = streams[i];

            // Uniform draws must stay within the requested range and the
            // observed maximum must be nonzero.
            sample.uniform(stream, ufRange, bufferF64);
            Dataset.Load(bufferF64, 1).Extrema();
            var max = Dataset.Load(bufferF64, 1).Max()[0];
            Claim.lteq(max, ufRange.Right);
            Claim.neq(max, 0);

            sample.bits(stream, bufferU32);

            // Bernoulli draws are necessarily 0/1-valued.
            sample.bernoulli(stream, .40, bufferI32);
            for (var j = 0; j < samplesize; j++)
                Claim.yea(bufferI32[j] == 0 || bufferI32[j] == 1);

            sample.gaussian(stream, .75, .75, bufferF64);
            sample.laplace(stream, .5, .5, bufferF64);
        }
    }
    finally
    {
        for (var i = 0; i < gencount; i++)
            streams[i].Dispose();
    }
}
void HistoTest<T>(Interval<T> domain, T? grain = null)
    where T : struct
{
    // Deposits a random sample into a histogram over the given domain and
    // verifies that the bucket counts account for every sample exactly once.
    var width = gmath.sub(domain.Right, domain.Left);
    var data = Random.Array<T>(Pow2.T14, domain);
    // Default bin width: 1/100th of the domain width.
    var binWidth = grain ?? gmath.div(width, convert<T>(100));
    var histo = new Histogram<T>(domain, binWidth);
    histo.Deposit(data);
    var buckets = histo.Buckets().ReadOnly();
    var total = (int)buckets.TotalCount();
    babble($"Histogram domain: {histo.Domain}");
    babble($"Histogram grain: {histo.BinWidth}");
    babble($"Histogram bucket count: {buckets.Length}");
    babble($"Total number of samples: {data.Length}");
    babble($"Sum of bucket counts: {total}");
    Claim.eq(total, data.Length);
}
public void contract32u_bench()
{
    // Benchmarks Contract against plain division for 32-bit operands.
    var count = SampleSize;
    var operands = Random.Array<uint>(count);
    var result = measure(CycleCount, "divide", "contract",
        _ =>
        {
            var sink = 0u;
            for (var i = 0; i < count; i++)
                sink = operands[i] / 100;
        },
        _ =>
        {
            var sink = 0u;
            for (var i = 0; i < count; i++)
                sink = operands[i].Contract(100u);
        });
    TracePerf(result);
}
public void contract16u_bench()
{
    // Benchmarks Contract against plain division for 16-bit operands.
    var count = SampleSize;
    var operands = Random.Array<ushort>(count);
    var result = measure(CycleCount, "divide", "contract",
        _ =>
        {
            ushort sink = 0;
            for (var i = 0; i < count; i++)
                sink = (ushort)(operands[i] / 100);
        },
        _ =>
        {
            ushort sink = 0;
            for (var i = 0; i < count; i++)
                sink = operands[i].Contract(100);
        });
    TracePerf(result);
}
public void contract8u_bench()
{
    // Benchmarks Contract against plain division for 8-bit operands.
    // Fix: the contract-lambda sink was inferred as int ("var last = 0"),
    // unlike the sibling 16/32-bit benchmarks where the sink matches the
    // element type; it is now declared as byte for consistency so both
    // lambdas exercise the same store width.
    var n = SampleSize;
    var src = Random.Array<byte>(n);
    var result = measure(CycleCount, "divide", "contract",
        _ =>
        {
            byte last = 0;
            for (var i = 0; i < n; i++)
                last = (byte)(src[i] / 100);
        },
        _ =>
        {
            byte last = 0;
            for (var i = 0; i < n; i++)
                last = (byte)(src[i].Contract(100));
        });
    TracePerf(result);
}