Exemple #1
0
        public void OpenCloseDirectFiles()
        {
            var path     = TestUtils.GetPath("UtilsTests");
            var count    = 10_000;
            var fileName = Path.Combine(path, "test_file.bin");

            // Grow the file one 4 KB page per iteration and stamp the iteration
            // number into the last 8 bytes of each newly added page.
            using (Benchmark.Run("Open/Close view", count * 1_000))
            using (var directFile = new DirectFile(fileName, 4096, true))
            {
                for (var i = 1; i <= count; i++)
                {
                    directFile.Grow(4096 + 4096 * i);
                    directFile.Buffer.Write <long>(4096 * i - 8, i);
                }
            }
            Benchmark.Dump();

            // Re-open the same file and verify every stamped value survived
            // the close/reopen cycle.
            using (var directFile = new DirectFile(fileName, 4096, true))
            {
                for (var i = 1; i <= count; i++)
                {
                    directFile.Grow(4096 + 4096 * i);
                    Assert.AreEqual(i, directFile.Buffer.Read <long>(4096 * i - 8));
                }
            }
        }
Exemple #2
0
        public unsafe LogBuffer(string filename, int termSizeMb = 5)
        {
            // Maps <filename> as a multi-term append log and starts a long-running
            // background task that tails the active term and raises OnAppend for
            // each record. Record framing, as read below: [int length][payload];
            // a length value of -1 marks the end of a term.
            Filename = filename;
            TermSize = termSizeMb * 1024 * 1024;

            _df            = new DirectFile(filename, LogSize);
            _writerTerm    = ActiveTermId;
            _writerTailPtr = Tail(_writerTerm);
            // Term data starts after the header; terms are laid out contiguously.
            _writerTermPtr = _df._buffer._data + HeaderSize + TermSize * _writerTerm;

            // init at the last known place
            _readerTerm    = _writerTerm;
            _readerTail    = *(int *)_writerTailPtr;
            _readerTermPtr = _writerTermPtr;

            _cts = new CancellationTokenSource();

            _reader = Task.Factory.StartNew(() => {
                var sw = new SpinWait();
                while (!_cts.Token.IsCancellationRequested)
                {
                    // NOTE(review): empty try with all the work in finally —
                    // presumably to keep one iteration from being torn by a
                    // thread abort; confirm that is the intent.
                    try {
                    } finally {
                        var tail = _readerTail; // Volatile.Read(ref *(int*)_readerTailPtr);
                        if (tail >= TermSize || *(int *)(_readerTermPtr + tail) == -1)
                        {
                            // Current term is exhausted: spin until the writer
                            // has moved on to another term, then follow it.
                            while (ActiveTermId == _readerTerm)
                            {
                                sw.SpinOnce();
                            }
                            // switch term
                            _readerTerm    = (_readerTerm + 1) % NumberOfTerms;
                            _readerTail    = 0;
                            _readerTermPtr = _df._buffer._data + HeaderSize + TermSize * _readerTerm;
                        }
                        else
                        {
                            var len = *(int *)(_readerTermPtr + tail);
                            if (len > 0)
                            {
                                // could read value
                                // NOTE(review): the copied byte[] is never used after
                                // Marshal.Copy — OnAppend receives the raw pointer to
                                // the record header instead. Dead copy? Confirm.
                                byte[] bytes = new byte[len];
                                Marshal.Copy((_readerTermPtr + tail + 4), bytes, 0, len);
                                OnAppend?.Invoke((_readerTermPtr + tail));
                                // Advance past the 4-byte length prefix plus payload.
                                _readerTail = _readerTail + len + 4;
                            }
                        }
                        Thread.SpinWait(1);
                        // TODO? implement signaling via WaitHandle?
                        //if (sw.NextSpinWillYield) sw.Reset();
                        //sw.SpinOnce();
                    }
                }
                OptimizationSettings.TraceVerbose("LogBuffer invoke loop exited");
            }, _cts.Token, TaskCreationOptions.LongRunning, TaskScheduler.Default)
                      .ContinueWith(task => {
                // Any unhandled exception in the reader loop is fatal by design:
                // log it and tear the process down.
                Console.WriteLine("LogBuffer OnAppendHandlerOld Invoke should never throw exceptions" + Environment.NewLine + task.Exception);
                Environment.FailFast("LogBuffer OnAppendHandlerOld Invoke should never throw exceptions", task.Exception);
            }, TaskContinuationOptions.OnlyOnFaulted);
        }
        public void CouldOpenSparse()
        {
            // Opens a 64 GB sparse, write-through file and touches its first KB.
            var path = TestUtils.GetPath();

            // using ensures the mapping/handle is released even if the write throws
            // (the original leaked the DirectFile on exception).
            using (var df = new DirectFile(Path.Combine(path, "test.file"), 64L * 1024 * 1024 * 1024, true,
                                           FileOptions.WriteThrough, sparse: true))
            {
                // Zero the first 1024 bytes; a sparse file should only materialize
                // the pages actually written.
                Unsafe.InitBlockUnaligned(df.DirectBuffer.Data, 0, 1024);
            }
        }
Exemple #4
0
        public void OpenHugeDirectFile()
        {
            // Maps a 50 GB file and writes a single byte to prove the huge
            // mapping is usable.
            var path     = TestUtils.GetPath();
            var filePath = Path.Combine(path, "test.tmp");

            // using ensures the 50 GB mapping is released even if the span write
            // throws (the original leaked the DirectFile on exception).
            using (var df = new DirectFile(filePath, 50 * 1024 * 1024 * 1024L, true))
            {
                var span = df.Buffer.Span;

                span[1] = 1;
            }
        }
        public void CouldWriteToWriteThroughFile()
        {
            // Writes to a write-through DirectFile via three paths: the direct
            // buffer, the view accessor, and a view stream.
            var path = TestUtils.GetPath();

            // using blocks make disposal exception-safe (the original disposed
            // manually and leaked both objects if any write threw).
            using (var df = new DirectFile(Path.Combine(path, "test.file"), 1024 * 1024, true,
                                           FileOptions.WriteThrough))
            {
                df.DirectBuffer.Write(0, 123L);
                var bytes = new byte[] { 123 };

                df._va.WriteArray(0, bytes, 0, 1);

                using (var ums = df._mmf.CreateViewStream(0, 1024 * 1024))
                {
                    ums.Write(bytes, 0, 1);
                }

                df.Flush(true);
            }
        }
        public void CouldReadUnallocatedSparse()
        {
            // Verifies that unallocated regions of a sparse file read back as zeros.
            var path = TestUtils.GetPath();

            // using ensures the mapping is released even if the assertion fails
            // (the original leaked the DirectFile on exception).
            using (var df = new DirectFile(Path.Combine(path, "test.file"), 1L * 1024 * 1024, true,
                                           FileOptions.WriteThrough, sparse: true))
            {
                // long accumulator: immune to overflow even for much larger files
                // (bytes are non-negative, so a zero sum proves every byte is zero).
                long sum = 0;

                for (long i = 0; i < df.Length; i++)
                {
                    sum += df.Buffer[i];
                }

                Assert.AreEqual(0, sum);
            }
        }
        public IpcLongIncrementListener(string filename, Action <long, long> action, long init = -1L)
        {
            // Cross-process listener over a small memory-mapped file plus a named
            // manual-reset event derived from the file name.
            _action = action;
            _df     = new DirectFile(filename + ".ipclistener", 8);

            var handleName = Path.GetFileName(filename) + ".ipclistener";
            bool created;

            _eh = new EventWaitHandle(false, EventResetMode.ManualReset, handleName, out created);

            if (!created || init == -1L)
            {
                // The handle already existed, or no initial value was supplied:
                // adopt whatever value is currently in the shared slot.
                _lastSeenValue = _df.Buffer.VolatileReadInt64(0);
            }
            else
            {
                // We created the handle and have an initial value: seed the slot.
                _df.Buffer.VolatileWriteInt64(0, init);
                _lastSeenValue = init;
            }
        }
Exemple #8
0
        public LogBuffers(string logFileName, int termLength = LogBufferDescriptor.TERM_MIN_LENGTH)
        {
            // Maps an Aeron-style log file and carves it into PARTITION_COUNT term
            // buffers, a metadata buffer per term, and one trailing log-metadata
            // buffer. IO failures while mapping are surfaced as AggregateException.
            try {
                // Total file size: all terms (data + per-term metadata) plus the
                // trailing log metadata section.
                long logLength = LogBufferDescriptor.PARTITION_COUNT *
                                 (LogBufferDescriptor.TERM_META_DATA_LENGTH + termLength) +
                                 LogBufferDescriptor.LOG_META_DATA_LENGTH;
                // Recompute/validate the term length from the actual log length
                // (an existing file may differ from the requested termLength).
                termLength = LogBufferDescriptor.ComputeTermLength(logLength);
                LogBufferDescriptor.CheckTermLength(termLength);
                _df         = new DirectFile(logFileName, logLength);
                _termLength = termLength;

                // if log length exceeds MAX_INT we need multiple mapped buffers, (see FileChannel.map doc).
                if (logLength < int.MaxValue)
                {
                    // All term-data sections come first; metadata sections follow.
                    int metaDataSectionOffset = termLength * LogBufferDescriptor.PARTITION_COUNT;

                    for (int i = 0; i < LogBufferDescriptor.PARTITION_COUNT; i++)
                    {
                        int metaDataOffset = metaDataSectionOffset + (i * LogBufferDescriptor.TERM_META_DATA_LENGTH);

                        // _buffers layout: [0..PARTITION_COUNT) term data,
                        // [PARTITION_COUNT..2*PARTITION_COUNT) term metadata,
                        // last slot = log metadata (assigned below).
                        _buffers[i] = new DirectBuffer(termLength, _df.Buffer.Data + i * termLength);
                        _buffers[i + LogBufferDescriptor.PARTITION_COUNT] = new DirectBuffer(LogBufferDescriptor.TERM_META_DATA_LENGTH, _df.Buffer.Data + metaDataOffset);
                        _partitions[i] = new LogBufferPartition(_buffers[i], _buffers[i + LogBufferDescriptor.PARTITION_COUNT]);
                    }

                    // Log metadata lives in the final bytes of the file.
                    _buffers[_buffers.Length - 1] = new DirectBuffer(LogBufferDescriptor.LOG_META_DATA_LENGTH,
                                                                     _df.Buffer.Data + (int)(logLength - LogBufferDescriptor.LOG_META_DATA_LENGTH));
                }
                else
                {
                    throw new NotImplementedException("TODO Check .NET mapping limit");
                }
            } catch (IOException ex) {
                throw new AggregateException(ex);
            }

            // Every buffer must be 8-byte aligned for the atomic operations
            // performed on it elsewhere.
            foreach (var buffer in _buffers)
            {
                buffer.VerifyAlignment(8);
            }
        }
Exemple #9
0
        public unsafe void CouldCatchErrorWhileWritingPastBoundary()
        {
            // This doesn't throw unless size is 4096 vs 12.
            // Could not make any assumptions that it is safe to write past boundary
            // with try..catch. Even byte[] allows it and we probably corrupt
            // .NETs memory next to it.
            var bytes = new byte[12];
            var fb    = new FixedBuffer(bytes);

            // Deliberately writes an 8-byte value starting 2 bytes before the end
            // of the 12-byte buffer — the out-of-bounds write under test.
            fixed(byte *ptr = &bytes[10])
            {
                *(long *)(ptr) = long.MaxValue;
            }

            // using ensures both mappings are released; the original never
            // disposed df2, leaking the file handle/mapping.
            using (var df = new DirectFile("../CouldCatchErrorWhileWritingPastBoundary", 12))
            {
                *(long *)(df.Buffer.Data + 10) = long.MaxValue;
            }

            using (var df2 = new DirectFile("../CouldCatchErrorWhileWritingPastBoundary", 12))
            {
                Assert.AreEqual(long.MaxValue, *(long *)(df2.Buffer.Data + 10));
            }
        }
Exemple #10
0
 private PersistentArray(string filename, long minCapacity, T fill)
 {
     // Map a file large enough for the header plus minCapacity items.
     var byteLength = DataOffset + minCapacity * ItemSize;

     _df = new DirectFile(filename, byteLength);
 }
        public void FileConcurrentWriteRead()
        {
            // Benchmark: one thread writes fixed-size pages through a FileStream
            // (WriteThrough) while a reader task consumes the same file through a
            // memory-mapped DirectFile, gated page-group by page-group via a
            // semaphore. Repeated for several "pages per write" batch sizes.
            var pagesPerWrite = new[] { 1, 2, 3, 4, 8, 16, 32, 64, 128, 256, 512, 1024 };
            var pageSize      = 512;

            var count = TestUtils.GetBenchCount(5_000L, 50);

            var path = TestUtils.GetPath(clear: true);
            var file = Path.Combine(path, "test.file");

            // Pre-create the file at full size so the reader can map it entirely.
            using (var fs = new FileStream(file, FileMode.CreateNew))
            {
                fs.SetLength(count * pageSize);
            }

            foreach (var mult in pagesPerWrite)
            {
                // Writer releases once per batch; reader waits once per batch.
                var sem  = new SemaphoreSlim(0, (int)count);
                var page = new byte[pageSize * mult];

                var rt = Task.Run(() =>
                {
                    using (var df = new DirectFile(file, 0L, false, FileOptions.SequentialScan))
                    //using (var fs2 = new FileStream(file, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite,
                    //    pageSize * mult,
                    //    FileOptions.None))
                    //using (var memoryMappedFile =
                    //    MemoryMappedFile.CreateFromFile(fs2, Guid.NewGuid().ToString(), count * pageSize,
                    //        MemoryMappedFileAccess.ReadWrite,
                    //        HandleInheritability.None, false))
                    //using (var memoryMappedViewAccessor = memoryMappedFile.CreateViewAccessor())
                    {
                        using (Benchmark.Run("Read " + mult, count * pageSize))
                        {
                            var i     = 0;
                            var page2 = new byte[pageSize * mult];

                            while (i < count / mult)
                            {
                                // Block until the writer has flushed batch i.
                                sem.Wait();
                                var sp = df.DirectBuffer.Span.Slice(pageSize * i * mult, pageSize * mult);
                                //memoryMappedViewAccessor.ReadArray(pageSize * i * (long)mult, page2, 0,
                                //    pageSize * mult);
                                // The mapped view must observe exactly what the
                                // FileStream wrote for that batch.
                                if (!((ReadOnlySpan <byte>)sp).SequenceEqual(page))
                                {
                                    Assert.Fail("Pages are not equal");
                                }
                                i++;
                            }
                        }
                    }
                });

                using (Benchmark.Run("Write " + mult, count * pageSize))
                {
                    using (var fs = new FileStream(file, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite,
                                                   pageSize * mult, FileOptions.WriteThrough)) //| WAL.WALWriter.FILE_FLAG_NO_BUFFERING)) //
                    {
                        fs.SetLength(count * pageSize);
                        // Fixed seed: reader compares against the same `page` array,
                        // so contents just need to be deterministic and non-trivial.
                        new Random(123).NextBytes(page);
                        for (int i = 0; i < count / mult; i++)
                        {
                            fs.Position = pageSize * i * (long)mult;
                            fs.Write(page, 0, page.Length);

                            // Signal the reader that batch i is on disk.
                            sem.Release();
                        }
                    }
                }

                rt.Wait();
            }

            Benchmark.Dump();
        }