public void ToArray([Values(1, 100, 200)] int count, [Values(1, 3, 10)] int batchSize)
{
    // Fill the stream in parallel, flatten it into a NativeArray, and verify
    // every element landed in stream-index order.
    var stream = new NativeStream(count, Allocator.TempJob);
    var fillInts = new WriteInts { Writer = stream.AsWriter() };
    fillInts.Schedule(count, batchSize).Complete();

    var array = stream.ToNativeArray<int>(Allocator.Temp);

    // WriteInts writes the values 0..i-1 into stream index i, so the
    // flattened array should read back as ascending runs per index.
    int readPos = 0;
    for (int streamIndex = 0; streamIndex != count; ++streamIndex)
    {
        for (int expected = 0; expected < streamIndex; ++expected)
        {
            Assert.AreEqual(expected, array[readPos]);
            readPos++;
        }
    }

    array.Dispose();
    stream.Dispose();
}
public void ItemCount([Values(1, 100, 200)] int count, [Values(1, 3, 10)] int batchSize)
{
    // Each stream index i receives i items from WriteInts, so the total is
    // the triangular number count*(count-1)/2.
    var stream = new NativeStream(count, Allocator.TempJob);
    var fillInts = new WriteInts { Writer = stream.AsWriter() };
    fillInts.Schedule(count, batchSize).Complete();

    int expectedItemCount = count * (count - 1) / 2;
    ExpectedCount(ref stream, expectedItemCount);

    stream.Dispose();
}
public void ItemCount([Values(1, 100, 200)] int count, [Values(1, 3, 10)] int batchSize)
{
    // WriteInts puts i items into stream index i, giving a triangular-number
    // total that ComputeItemCount must report back exactly.
    var stream = new NativeStream(count, Allocator.TempJob);
    var fillInts = new WriteInts { Writer = stream.AsWriter() };
    fillInts.Schedule(count, batchSize).Complete();

    int expectedItemCount = count * (count - 1) / 2;
    Assert.AreEqual(expectedItemCount, stream.ComputeItemCount());

    stream.Dispose();
}
public void ItemCount([Values(1, 100, 200)] int count, [Values(1, 3, 10)] int batchSize)
{
    // BlockStream variant: WriteInts puts i items into block i, so
    // ComputeItemCount should return the triangular number count*(count-1)/2.
    var stream = new BlockStream(count, 0xd3e8afdd);
    var fillInts = new WriteInts { Writer = stream };
    fillInts.Schedule(count, batchSize).Complete();

    int expectedItemCount = count * (count - 1) / 2;
    Assert.AreEqual(expectedItemCount, stream.ComputeItemCount());

    stream.Dispose();
}
public void ParallelWriteThrows()
{
    // Scheduling a second job against a writer that is already in flight must
    // be rejected by the safety system.
    var stream = new NativeStream(100, Allocator.TempJob);
    var fillInts = new WriteInts { Writer = stream.AsWriter() };

    var writerJob = fillInts.Schedule(100, 16);
    Assert.Throws<InvalidOperationException>(() => fillInts.Schedule(100, 16));

    writerJob.Complete();
    stream.Dispose();
}
public void ParallelWriteThrows()
{
    // BlockStream variant: a second schedule against an in-flight writer must
    // trigger the safety system.
    var stream = new BlockStream(100, 0xd3e8afdd);
    var fillInts = new WriteInts { Writer = stream };

    var writerJob = fillInts.Schedule(100, 16);
    Assert.Throws<InvalidOperationException>(() => fillInts.Schedule(100, 16));

    writerJob.Complete();
    stream.Dispose();
}
public void DisposeAfterSchedule()
{
    // Dispose(JobHandle) defers the actual release until the writer job has
    // run, but the container must report not-created immediately.
    var stream = new BlockStream(100, 0xd3e8afdd);
    var fillInts = new WriteInts { Writer = stream };

    var writerJob = fillInts.Schedule(100, 16);
    var disposeJob = stream.Dispose(writerJob);

    Assert.IsFalse(stream.IsCreated);

    disposeJob.Complete();
}
public void DisposeAfterSchedule()
{
    // NativeStream variant: a deferred Dispose(JobHandle) must leave the
    // handle marked not-created right away, while the memory is freed only
    // once the dispose job completes.
    var stream = new NativeStream(100, Allocator.TempJob);
    var fillInts = new WriteInts { Writer = stream.AsWriter() };

    var writerJob = fillInts.Schedule(100, 16);
    var disposeJob = stream.Dispose(writerJob);

    Assert.IsFalse(stream.IsCreated);

    disposeJob.Complete();
}
public void PopulateInts([Values(1, 100, 200)] int count, [Values(1, 3, 10)] int batchSize)
{
    // One parallel writer followed by two concurrent readers: ReadInts
    // asserts the contents, so completing both read jobs is the check.
    var stream = new NativeStream(count, Allocator.TempJob);
    var fillInts = new WriteInts { Writer = stream.AsWriter() };
    var jobHandle = fillInts.Schedule(count, batchSize);

    var compareInts = new ReadInts { Reader = stream.AsReader() };
    var res0 = compareInts.Schedule(count, batchSize, jobHandle);
    var res1 = compareInts.Schedule(count, batchSize, jobHandle);

    res0.Complete();
    res1.Complete();

    stream.Dispose();
}
public void PopulateInts([Values(1, 100, 200)] int count, [Values(1, 3, 10)] int batchSize)
{
    // BlockStream variant: one parallel writer, then two concurrent readers
    // that each validate the written values inside ReadInts.
    var stream = new BlockStream(count, 0x9b98651c);
    var fillInts = new WriteInts { Writer = stream };
    var jobHandle = fillInts.Schedule(count, batchSize);

    var compareInts = new ReadInts { Reader = stream };
    var res0 = compareInts.Schedule(count, batchSize, jobHandle);
    var res1 = compareInts.Schedule(count, batchSize, jobHandle);

    res0.Complete();
    res1.Complete();

    stream.Dispose();
}