/// <summary>
/// Fills the stream with the integer sequence 0..numElements-1 using a single writer.
/// </summary>
/// <param name="stream">Destination stream; must already be allocated.</param>
/// <param name="numElements">How many consecutive integers to write.</param>
public static void StreamWrite(ref UnsafeStream stream, int numElements)
{
    var writer = stream.AsWriter();
    for (int value = 0; value != numElements; value++)
    {
        writer.Write(value);
    }
}
// Allocates the initial block storage for a NativeStream and wires up its safety handles.
// NOTE(review): CheckAllocator presumably rejects Allocator.None/Invalid — confirm its contract.
static void AllocateBlock(out NativeStream stream, Allocator allocator)
{
    // Validate the allocator before touching any native memory.
    CheckAllocator(allocator);

    // Delegate the actual block allocation to the unmanaged backing stream.
    UnsafeStream.AllocateBlock(out stream.m_Stream, allocator);

#if ENABLE_UNITY_COLLECTIONS_CHECKS
    // Editor/dev builds only: create the safety handle + dispose sentinel pair
    // so leaked or double-disposed streams are reported.
    DisposeSentinel.Create(out stream.m_Safety, out stream.m_DisposeSentinel, 0, allocator);
#endif
}
/// <summary>
/// Writes the characters of <paramref name="value"/> into the given buffer,
/// one character at a time, through a StreamWriter using the BSON text encoding.
/// </summary>
/// <param name="buffer">Raw destination buffer wrapped by an UnsafeStream.</param>
/// <param name="value">Source string to encode into the buffer.</param>
public static void FillFromString(UnsafeBuffer *buffer, string value)
{
    using (var stream = new UnsafeStream((byte *)buffer))
    using (var writer = new StreamWriter(stream, BsonConstants.Encoding))
    {
        for (var i = 0; i < value.Length; i++)
        {
            writer.Write(value[i]);
        }
    }
}
// Allocates the initial block storage for a NativeStream and wires up its safety handles.
static void AllocateBlock(out NativeStream stream, Allocator allocator)
{
#if ENABLE_UNITY_COLLECTIONS_CHECKS
    // Allocator.None and below are not usable for native allocations.
    if (allocator <= Allocator.None)
    {
        // nameof keeps the reported parameter name in sync with any future rename;
        // the produced string is identical to the old hard-coded "allocator".
        throw new ArgumentException("Allocator must be Temp, TempJob or Persistent", nameof(allocator));
    }
#endif

    // Delegate the actual block allocation to the unmanaged backing stream.
    UnsafeStream.AllocateBlock(out stream.m_Stream, allocator);

#if ENABLE_UNITY_COLLECTIONS_CHECKS
    // Editor/dev builds only: create the safety handle + dispose sentinel pair
    // so leaked or double-disposed streams are reported.
    DisposeSentinel.Create(out stream.m_Safety, out stream.m_DisposeSentinel, 0, allocator);
#endif
}
/// <summary>
/// Offers the data source to every registered plugin resource parser and
/// returns the first node one of them produces, or null when none match.
/// </summary>
/// <param name="source">Raw data to parse, exposed to parsers as an UnsafeStream.</param>
/// <param name="parent">Prospective parent node (not consulted here; kept for the caller's contract).</param>
internal static ResourceNode TryParse(DataSource source, ResourceNode parent)
{
    using (UnsafeStream stream = new UnsafeStream(source.Address, (uint)source.Length))
    {
        foreach (PluginResourceParser parser in BrawlAPIInternal.ResourceParsers)
        {
            ResourceNode node = parser.TryParse(stream);
            if (node != null)
            {
                // First successful parser wins; the stream is disposed on the way out.
                return node;
            }
        }
    }

    return null;
}
/// <summary>
/// Offers the data source to every registered plugin loader and returns the
/// first node one of them produces, or null when none match.
/// </summary>
/// <param name="source">Raw data to parse, exposed to loaders as an UnsafeStream.</param>
internal static ResourceNode TryParse(DataSource source)
{
    using (UnsafeStream stream = new UnsafeStream(source.Address, (uint)source.Length))
    {
        foreach (PluginLoader loader in BrawlAPI.Loaders)
        {
            ResourceNode node = loader.TryParse(stream);
            if (node != null)
            {
                // First successful loader wins; the stream is disposed on the way out.
                return node;
            }
        }
    }

    return null;
}
/// <summary>
/// Benchmarks UnsafeStream writes going through a Burst-compiled function pointer,
/// measuring 16K integer writes per iteration.
/// </summary>
public void UnsafeStream_Performance_Write_Burst()
{
    const int numElements = 16 << 10;

    var stream = new UnsafeStream(1, Allocator.Persistent);
    var funcPtr = BurstCompiler.CompileFunctionPointer<Pointers.StreamWriteDelegate>(Pointers.StreamWrite);

    Measure.Method(() => funcPtr.Invoke(ref stream, numElements))
           .WarmupCount(100)
           .MeasurementCount(1000)
           .Run();

    stream.Dispose();
}
// Filters the sample set down to samples whose call stack touches one of the
// white-listed functions, producing a new TempJob-allocated array in outSamples.
// Chooses a cheaper single-function matcher when the white-list has exactly one entry.
// NOTE(review): when the white-list is empty, outSamples is left as `default`
// (not created) — callers presumably check IsCreated; confirm.
static unsafe void FilterSamplesByFunction(NativeArray <int> functionWhiteList, NativeArray <SampleData> samples, NativeArray <StackFrameData> stackFrames, out NativeArray <SampleData> outSamples)
{
    // Nothing to match against: return an uncreated array and do no work.
    if (functionWhiteList.Length == 0)
    {
        outSamples = default;
        return;
    }

    // The stream collects matching samples from all job threads; sized by sample count.
    using (var sampleStream = new UnsafeStream(samples.Length, Allocator.TempJob))
    {
        if (functionWhiteList.Length == 1)
        {
            // Single function: avoid scanning a list per sample.
            new FilterSamplesByFunction <SingleFunctionMatcher>
            {
                Frames = (StackFrameData *)stackFrames.GetUnsafeReadOnlyPtr(),
                Samples = (SampleData *)samples.GetUnsafeReadOnlyPtr(),
                OutputSamples = sampleStream.AsWriter(),
                Matcher = new SingleFunctionMatcher { Function = functionWhiteList[0] }
            }.Schedule(samples.Length, 32).Complete();
        }
        else
        {
            // Multiple functions: matcher holds a raw pointer into the white-list,
            // which must stay alive until Complete() returns (it does — we block here).
            new FilterSamplesByFunction <MultiFunctionMatcher>
            {
                Frames = (StackFrameData *)stackFrames.GetUnsafeReadOnlyPtr(),
                Samples = (SampleData *)samples.GetUnsafeReadOnlyPtr(),
                OutputSamples = sampleStream.AsWriter(),
                Matcher = new MultiFunctionMatcher
                {
                    Functions = (int *)functionWhiteList.GetUnsafeReadOnlyPtr(),
                    NumFunctions = functionWhiteList.Length
                }
            }.Schedule(samples.Length, 32).Complete();
        }

        // Flatten the per-thread stream into one contiguous array owned by the caller.
        outSamples = sampleStream.ToNativeArray <SampleData>(Allocator.TempJob);
    }
}
/// <summary>
/// Benchmarks plain (non-Burst) UnsafeStream writes, measuring 16K integer
/// writes per iteration through a single cached writer.
/// </summary>
public void UnsafeStream_Performance_Write()
{
    const int numElements = 16 << 10;

    var stream = new UnsafeStream(1, Allocator.Persistent);
    var writer = stream.AsWriter();

    Measure.Method(() =>
           {
               for (int value = 0; value != numElements; value++)
               {
                   writer.Write(value);
               }
           })
           .WarmupCount(100)
           .MeasurementCount(1000)
           .Run();

    stream.Dispose();
}
/// <summary>
/// Returns the result at the specified index, deserialized from the cursor's raw BSON bytes.
/// </summary>
/// <param name="index">Zero-based result index; validated by EnsureInRange.</param>
public unsafe TDocument this[int index]
{
    get
    {
        EnsureInRange(index);

        var pointer = CursorResult(index, out int size);

        using (var stream = new UnsafeStream(pointer))
        using (var reader = new BsonReader(stream))
        {
            // TODO: Try to move this hack to deserialization step.
            // The document id is read from a fixed byte offset inside the raw result.
            var id = *((ObjectId *)(pointer + 9));

            var document = Serializer.Deserialize <TDocument>(reader);
            IdHelper <TDocument> .SetId(document, ref id);
            return document;
        }
    }
}
// Rebuilds the per-thread aggregated sample view shown in the subtree tree view.
// Caches merged stacks and the filtered sample set; only recomputes what the
// changed inputs (search term, thread index) invalidate.
unsafe void RefreshData(int threadIndex)
{
    // Nothing changed since the last refresh — keep the current view.
    if (m_FunctionSearchField.value == m_CurrentSearchTerm && m_CurrentThread == threadIndex) { return; }
    // Negative index means no thread selected.
    if (threadIndex < 0) { return; }

    // Lazily build the function-merged stack frames once; reused across refreshes.
    if (!m_MergedStackFrames.IsCreated)
    {
        m_MergedSamples = new NativeArray <SampleData>(m_Trace.Samples, Allocator.Persistent);
        m_MergedStackFrames = new NativeList <StackFrameData>(Allocator.Persistent);
        new MergeCallStacksJob { MergeBy = MergeCallStacksJob.MergeMode.ByFunction, NewStackFrames = m_MergedStackFrames, Samples = m_MergedSamples, StackFrames = m_Trace.StackFrames }.Run();
    }

    // Search term changed: rebuild the function white-list and re-filter samples.
    if (m_FunctionSearchField.value != m_CurrentSearchTerm)
    {
        m_CurrentSearchTerm = m_FunctionSearchField.value;
        m_FilteredSamples.TryDispose();
        m_FunctionWhiteList.TryDispose();
        m_FunctionWhiteList = FindFunctionsByName(m_FunctionSearchField.value, m_Trace.Functions);
        FilterSamplesByFunction(m_FunctionWhiteList, m_MergedSamples, m_MergedStackFrames, out m_FilteredSamples);
    }

    // No function matches the search — clear the view and bail out.
    if (m_FunctionWhiteList.Length == 0) { m_SubtreeTreeView.ClearData(); return; }

    // Collect the frames/samples belonging to the selected thread.
    var threadCollection = new CollectThreadStackFrames { Thread = threadIndex, Samples = m_FilteredSamples, StackFrames = m_MergedStackFrames, FramesInThread = new NativeList <StackFrameSamples>(Allocator.TempJob), SamplesInThread = new NativeList <SampleData>(Allocator.TempJob) };
    threadCollection.Run();
    // Only the frames are needed below; release the per-thread samples immediately.
    threadCollection.SamplesInThread.Dispose();

    // Mark frames matching the white-listed function(s); single-function path avoids a list scan.
    if (m_FunctionWhiteList.Length == 1)
    {
        new FilterFramesByFunctionJob <SingleFunctionMatcher> { Frames = threadCollection.FramesInThread.AsArray(), Matcher = new SingleFunctionMatcher { Function = m_FunctionWhiteList[0] } }.Schedule().Complete();
    }
    else if (m_FunctionWhiteList.IsCreated)
    {
        // Matcher holds a raw pointer into the white-list; safe because we block on Complete().
        new FilterFramesByFunctionJob <MultiFunctionMatcher> { Frames = threadCollection.FramesInThread.AsArray(), Matcher = new MultiFunctionMatcher { Functions = (int *)m_FunctionWhiteList.GetUnsafeReadOnlyPtr(), NumFunctions = m_FunctionWhiteList.Length } }.Schedule().Complete();
    }

    // Gather per-frame samples into a stream, then aggregate them by function.
    // NOTE(review): `stream` does not appear to be disposed in this method — confirm
    // whether ToNativeArray below transfers ownership, otherwise this leaks a TempJob allocation.
    var stream = new UnsafeStream(threadCollection.FramesInThread.Length, Allocator.TempJob);
    new CollectSamplesJob { Frames = (StackFrameSamples *)threadCollection.FramesInThread.GetUnsafeReadOnlyPtr(), OutSamples = stream.AsWriter() }.Schedule(threadCollection.FramesInThread.Length, 32).Complete();

    var aggregateJob = new AggregateSamplesJob { SamplesIn = stream.ToNativeArray <FunctionSampleData>(Allocator.TempJob), SamplesOut = new NativeHashMap <int, FunctionSampleData>(threadCollection.FramesInThread.Length, Allocator.TempJob) };
    // Frames were snapshotted into SamplesIn; the list itself is no longer needed.
    threadCollection.FramesInThread.Dispose();
    aggregateJob.Run();

    // Persist the aggregated values, sorted by total, and hand them to the view.
    var values = aggregateJob.SamplesOut.GetValueArray(Allocator.Persistent);
    aggregateJob.SamplesIn.Dispose();
    aggregateJob.SamplesOut.Dispose();
    NativeSortExtension.Sort(values, new FunctionByTotal());
    m_FunctionSamples.TryDispose();
    m_FunctionSamples = values;
    m_SubtreeTreeView.SetData(ref m_Trace, m_FunctionSamples);
}
/// <summary>
/// Exposes a BSON handle as a readable <see cref="Stream"/> without copying its bytes.
/// </summary>
/// <param name="bson">Handle to the raw BSON data to wrap.</param>
internal Stream ConvertToStream(BsonHandle bson)
{
    return new UnsafeStream(bson);
}