internal BufferLock(int startIndex, int endIndex, ConcurrentBuffer buffer)
{
    StartIndex = startIndex;
    EndIndex = endIndex;
    TargetBuffer = buffer;
    // Expose the locked [startIndex, endIndex) slice of the backing array
    // as a writable MemoryStream.
    Stream = new MemoryStream(buffer.GetBuffer(), startIndex, endIndex - startIndex, true);
}
static void Main(string[] args)
{
    ConcurrentBuffer cb = new ConcurrentBuffer(32000);
    byte[] myData = { 32, 13, 53, 29, 50 };

    // l1 will acquire a lock.
    var l1 = cb.AcquireLock(0, 50);
    if (l1 != null)
    {
        l1.Stream.Write(myData, 0, myData.Length);
    }

    // l2 will fail because l1 has part of its range locked.
    var l2 = cb.AcquireLock(30, 70);
    if (l2 != null)
    {
        l2.Stream.Write(myData, 0, myData.Length);
    }

    l1.Release();

    // l3 will succeed at locking because l1 has been released.
    var l3 = cb.AcquireLock(40, 5000);
    if (l3 != null)
    {
        // Fill the locked range with as many whole copies of myData as fit.
        while (l3.Stream.Position + myData.Length <= l3.Stream.Length)
        {
            l3.Stream.Write(myData, 0, myData.Length);
        }
    }
    l3.Release();

    Console.ReadLine();
}
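The demo above depends on AcquireLock returning null when the requested range overlaps one that is still locked. The implementation is not shown here; the following is a minimal sketch of that overlap check, assuming the buffer keeps its active locks in a list guarded by a private sync object (the names _sync and _activeLocks are illustrative):

public BufferLock AcquireLock(int startIndex, int endIndex)
{
    lock (_sync)
    {
        // Two half-open ranges [a, b) and [c, d) overlap when a < d && c < b.
        foreach (var held in _activeLocks)
        {
            if (startIndex < held.EndIndex && held.StartIndex < endIndex)
            {
                return null; // part of the requested range is already locked
            }
        }
        var bufferLock = new BufferLock(startIndex, endIndex, this);
        _activeLocks.Add(bufferLock);
        return bufferLock;
    }
}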
public void CreateBuffer_Returns_Unbounded_BlockingCollection_For_ZeroSize()
{
    using (var buffer = ConcurrentBuffer.CreateBuffer<object>(0))
    {
        Assert.True(buffer.BoundedCapacity == -1);
    }
}
public void CreateBuffer_Returns_Bounded_BlockingCollection_As_Per_Given_Size(int size)
{
    using (var buffer = ConcurrentBuffer.CreateBuffer<object>(size))
    {
        Assert.True(buffer.BoundedCapacity == size);
    }
}
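Together with the invalid-size test at the end of this section, these two tests pin down the factory's contract: zero means unbounded (BoundedCapacity == -1), a positive size means bounded, and a size below the threshold is rejected with a DdnDfException. A minimal sketch consistent with that contract (the DdnDfException constructor shown is an assumption; the tests only pin the exception type and its ErrorCode):

public static BlockingCollection<T> CreateBuffer<T>(int size)
{
    if (size < 0)
    {
        // Assumed constructor signature; not taken from the shown source.
        throw new DdnDfException(DdnDfErrorCode.ValueLessThanThreshold);
    }
    // A parameterless BlockingCollection is unbounded (BoundedCapacity == -1).
    return size == 0
        ? new BlockingCollection<T>()
        : new BlockingCollection<T>(size);
}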
public void ConcurrentBufferTestCancelProducer()
{
    var cb = new ConcurrentBuffer<int>(32);
    var expected = Enumerable.Range(0, 10000).ToList();
    var cts = new CancellationTokenSource();

    var producer = new Thread(() =>
    {
        // Keep pushing until the push is cancelled by the consumer.
        for (var i = 0;; ++i)
        {
            if (!cb.Push(i, cts.Token))
            {
                return;
            }
        }
    });

    var actual = new List<int>();
    var consumer = new Thread(() =>
    {
        actual.AddRange(Enumerable.Range(0, 10000).Select(_ => cb.Pop()));
        // Stop the otherwise endless producer once all items are consumed.
        cts.Cancel();
    });

    producer.Start();
    consumer.Start();
    producer.Join();
    consumer.Join();
    CollectionAssert.AreEqual(expected, actual);
}
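This test and ConcurrentBufferTestCancelConsumer below assume a Push overload that returns false once its token is cancelled, and a Pop overload that returns a wrapper which is falsy after cancellation. A minimal sketch of that surface over BlockingCollection<T> follows; the PopResult wrapper and the drain-before-cancel behaviour are assumptions, not the project's actual internals:

using System.Collections.Concurrent;
using System.Threading;

// Hypothetical sketch of the Push/Pop surface these tests exercise.
public sealed class ConcurrentBuffer<T>
{
    private readonly BlockingCollection<T> _items;

    public ConcurrentBuffer(int capacity) =>
        _items = new BlockingCollection<T>(capacity);

    public void Push(T item) => _items.Add(item);

    // Returns false instead of throwing when the token is cancelled.
    public bool Push(T item, CancellationToken token)
    {
        try { _items.Add(item, token); return true; }
        catch (OperationCanceledException) { return false; }
    }

    public T Pop() => _items.Take();

    // Falsy (HasValue == false) only when cancelled while waiting; items
    // already buffered before Cancel() are drained first, which is what
    // lets the cancel-consumer test receive all 10000 values.
    public PopResult<T> Pop(CancellationToken token)
    {
        if (_items.TryTake(out var item)) return new PopResult<T>(true, item);
        try { return new PopResult<T>(true, _items.Take(token)); }
        catch (OperationCanceledException) { return new PopResult<T>(false, default); }
    }
}

public readonly struct PopResult<T>
{
    public PopResult(bool hasValue, T value) { HasValue = hasValue; Value = value; }
    public bool HasValue { get; }
    public T Value { get; }
    public static implicit operator bool(PopResult<T> r) => r.HasValue;
}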
public void ConcurrentBufferTestSlowProducer()
{
    var cb = new ConcurrentBuffer<int>(8);
    var expected = Enumerable.Range(0, 512).ToList();

    var producer = new Thread(() =>
    {
        foreach (var n in expected)
        {
            cb.Push(n);
            Thread.Sleep(5); // deliberately slow producer
        }
    });

    var actual = new List<int>();
    var consumer = new Thread(() =>
    {
        actual.AddRange(Enumerable.Range(0, expected.Count).Select(_ => cb.Pop()));
    });

    producer.Start();
    consumer.Start();
    producer.Join();
    consumer.Join();
    CollectionAssert.AreEqual(expected, actual);
}
private void Awake()
{
    if (_ccbuffer == null)
    {
        _ccbuffer = new ConcurrentBuffer();
        s_bufferbytes = _ccbuffer.GetBufferSize() * 4;
    }
    _sampleBufferList = new List<float[]>();
    _audioThreadSampleQueueList = new List<ConcurrentQueue<float[]>>();
    _sampleBufferBag = new ConcurrentBag<ConcurrentQueue<float[]>>();
}
public void ReadCanThrowTimeoutException()
{
    bool timedout = false;
    var name = Guid.NewGuid().ToString();
    byte[] readData = new byte[1024];

    using (var buf = new ConcurrentBuffer(name, 1024))
    using (var buf2 = new ConcurrentBuffer(name))
    {
        // Set a small timeout to speed up the test.
        buf2.ReadWriteTimeout = 0;

        // Introduce a possible deadlock by acquiring the write lock without releasing it.
        buf.AcquireWriteLock();

        // We want the AcquireReadLock to fail.
        if (!buf2.AcquireReadLock(1))
        {
            try
            {
                // Read should time out with TimeoutException because buf.ReleaseWriteLock has not been called.
                buf2.Read(readData);
            }
            catch (TimeoutException)
            {
                timedout = true;
            }
        }
        Assert.True(timedout);

        // Remove the deadlock situation by releasing the write lock...
        buf.ReleaseWriteLock();

        // ...and ensure that we can now read the data.
        if (buf.AcquireReadLock(1))
        {
            buf2.Read(readData);
        }
        else
        {
            Assert.True(false); // test has failed
        }
    }
}
public void ProducerCanWriteAndConsumerCanRead()
{
    var name = Guid.NewGuid().ToString();
    using (var producer = new ConcurrentBuffer(name, 1024))
    using (var consumer = new ConcurrentBuffer(name))
    {
        int data = 123;
        producer.Write<int>(ref data);
        data = 456;
        producer.Write<int>(ref data, 1000);

        int readData;
        consumer.Read<int>(out readData);
        Assert.Equal(123, readData);
        consumer.Read<int>(out readData, 1000);
        Assert.Equal(456, readData);
    }
}
/// <summary>
/// Provides a workaround for decompressing gzip files that are concatenated.
/// See http://www.zlib.org/rfc-gzip.html for the GZip header specification.
/// This method uses streams to reduce memory load (unlike CompressorMultiThreadHighMemory).
/// </summary>
/// <param name="filePath">FileInfo of the concatenated gzip file</param>
/// <param name="decompressedFileName">Name of the decompressed file</param>
/// <param name="deleteOriginal">Whether to delete the original file</param>
/// <returns>The integer result of ConcatenateDecompressedChunksLowMemory</returns>
public int DecompressConcatenatedStreams(FileInfo filePath, string decompressedFileName, bool deleteOriginal = false)
{
    List<long> startIndexes = new List<long>();
    ConcurrentBuffer concBuffer = new ConcurrentBuffer(Const.CHUNK_SIZE_IN_MGBS * 1024 * 1024);

    using (FileStream inFileStream = File.OpenRead(filePath.FullName))
    {
        int bytesRead = 0;
        int bufferReadCount = 0;
        int byteCount = concBuffer.MaxCount;
        int offset = 0;

        while ((bytesRead = inFileStream.Read(concBuffer.Buffer, offset, byteCount)) != 0)
        {
            int traversableLength = bytesRead - _startOfFilePattern.Length;
            FindMatches(startIndexes, _startOfFilePattern, concBuffer.Buffer, traversableLength, bufferReadCount, byteCount);

            // Important: make sure a pattern split across two buffers is not lost.
            if (bytesRead == byteCount)
            {
                concBuffer.MoveLastBytesToBeginning(_startOfFilePattern.Length);
            }

            // Needed to wrap the last couple of bytes to the next buffer.
            if (bufferReadCount == 0)
            {
                byteCount -= _startOfFilePattern.Length;
                offset += _startOfFilePattern.Length;
            }
            bufferReadCount += 1;
        }
        // Treat end of file as the final stream boundary.
        startIndexes.Add(filePath.Length);
    }

    int result = ConcatenateDecompressedChunksLowMemory(startIndexes, filePath, decompressedFileName);
    if (deleteOriginal)
    {
        filePath.Delete();
    }
    return result;
}
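The MoveLastBytesToBeginning call is what keeps a gzip header that straddles two reads detectable: the tail of the previous read is carried over, and the next Read lands right after it (hence the offset/byteCount adjustment on the first pass). A minimal sketch of what that method is expected to do, assuming MaxCount equals Buffer.Length:

// Carries the last 'count' bytes of the buffer over to its start so that the
// next FileStream.Read(Buffer, offset, byteCount) appends directly after them
// and a pattern split across the read boundary can still be matched.
public void MoveLastBytesToBeginning(int count)
{
    Array.Copy(Buffer, Buffer.Length - count, Buffer, 0, count);
}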
public void ReadWriteBytesDataMatches()
{
    var name = Guid.NewGuid().ToString();
    Random r = new Random();
    byte[] data = new byte[1024];
    byte[] readData = new byte[1024];
    r.NextBytes(data);

    using (var buf = new ConcurrentBuffer(name, 1024))
    using (var buf2 = new ConcurrentBuffer(name))
    {
        buf.Write(data);
        buf2.Read(readData);
        for (var i = 0; i < data.Length; i++)
        {
            Assert.Equal(data[i], readData[i]);
        }
    }
}
public void ConcurrentBufferTestCancelConsumer()
{
    var cb = new ConcurrentBuffer<int>(32);
    var expected = Enumerable.Range(0, 10000).ToList();
    var cts = new CancellationTokenSource();

    var producer = new Thread(() =>
    {
        foreach (var n in expected)
        {
            cb.Push(n);
        }
        // Signal the consumer that no more items are coming.
        cts.Cancel();
    });

    var actual = new List<int>();
    var consumer = new Thread(() =>
    {
        while (true)
        {
            var res = cb.Pop(cts.Token);
            if (!res)
            {
                break;
            }
            actual.Add(res.Value);
        }
    });

    producer.Start();
    consumer.Start();
    producer.Join();
    consumer.Join();
    CollectionAssert.AreEqual(expected, actual);
}
public PpcBuffer(int bufferSize, CancellationToken token)
{
    _collection = ConcurrentBuffer.CreateBuffer<T>(bufferSize);
    _token = token;
}
public ConcurrentBufferImpl(ConcurrentBuffer buffer)
{
    this.buffer = buffer;
}
public void CreateBuffer_Throws_Error_When_BufferSize_Is_Invalid(int size)
{
    var ex = Assert.Throws<DdnDfException>(() => ConcurrentBuffer.CreateBuffer<object>(size));
    Assert.True(ex.ErrorCode.Equals(DdnDfErrorCode.ValueLessThanThreshold));
}