private async void Button_Click_4(object sender, RoutedEventArgs e)
{
    FileOpenPicker picker = new FileOpenPicker();
    picker.FileTypeFilter.Add(".mp3");

    StorageFile file = await picker.PickSingleFileAsync();
    if (file == null)
    {
        // The user cancelled the picker.
        return;
    }

    var properties = await file.GetBasicPropertiesAsync();
    if (properties == null)
    {
        return;
    }

    // Read the whole file into memory before wrapping it in a FileBuffer.
    byte[] data = new byte[properties.Size];
    using (var stream = await file.OpenStreamForReadAsync())
    using (BinaryReader reader = new BinaryReader(stream))
    {
        data = reader.ReadBytes(data.Length);
    }

    FileBuffer sendData = new FileBuffer(data, file.Name);
    await service.SendAsync(sendData);
}

public void SimpleRead_Works()
{
    var buffer = new FileBuffer(
        new Context()
        {
            VolumeStream = SparseStream.FromStream(
                new MemoryStream(Encoding.UTF8.GetBytes("Hello, World!")),
                Ownership.Dispose),
            VolumeHeader = new VolumeHeader()
            {
                BlockSize = 0x100,
            },
        },
        new ForkData()
        {
            LogicalSize = 0x123,
            Extents = new ExtentDescriptor[]
            {
                new ExtentDescriptor()
                {
                    BlockCount = 1,
                    StartBlock = 0,
                },
            },
        },
        new CatalogNodeId(1));

    byte[] data = new byte[0x10];
    Assert.Equal(13, buffer.Read(0, data, 0, data.Length));
}

public static void CallFile(HtcHttpContext httpContext, string requestPath)
{
    using (var fileBuffer = new FileBuffer(requestPath, 2048))
    {
        var contentType = ContentType.DEFAULT.FromExtension(requestPath);
        // 7 * 24 Hour * 60 Min * 60 Sec = 604800 Sec
        httpContext.Response.Headers.Add("Access-Control-Allow-Origin", "*");
        httpContext.Response.Headers.Add("Date", DateTime.Now.ToString("r"));
        //context.Response.Headers.Add("Last-Modified", File.GetLastWriteTime(requestPath).ToString("r"));
        httpContext.Response.Headers.Add("Server", "HtcSharp");
        //context.Response.Headers.Add("Cache-Control", "max-age=604800");
        httpContext.Response.ContentType = contentType.ToValue();
        httpContext.Response.StatusCode = 200;

        var (startRange, endRange) = GetRange(httpContext, fileBuffer);
        if (UseGzip(httpContext, fileBuffer.Lenght))
        {
            httpContext.Response.Headers.Add("Content-Encoding", "gzip");
            using (var gzipStream = new GZipStream(httpContext.Response.OutputStream, CompressionMode.Compress, false))
            {
                fileBuffer.CopyToStream(gzipStream, startRange, endRange);
            }
        }
        else
        {
            httpContext.Response.ContentLength = endRange - startRange;
            fileBuffer.CopyToStream(httpContext.Response.OutputStream, startRange, endRange);
        }
    }
}

private static Tuple<long, long> GetRange(HtcHttpContext httpContext, FileBuffer fileBuffer)
{
    var rangeData = GetHeader(httpContext, "Range");
    long startRange;
    long endRange = -1;

    if (rangeData != null)
    {
        var rangeHeader = rangeData.Replace("bytes=", "");
        var range = rangeHeader.Split('-');
        startRange = long.Parse(range[0]);
        if (range[1].Trim().Length > 0)
        {
            long.TryParse(range[1], out endRange);
        }
        if (endRange == -1)
        {
            endRange = fileBuffer.Lenght;
        }
    }
    else
    {
        startRange = 0;
        endRange = fileBuffer.Lenght;
    }

    return new Tuple<long, long>(startRange, endRange);
}

// [TestMethod]
public void Test()
{
    //var inFilePath = @"d:\src\opal\tests\opal.txt";
    var inFilePath = @"D:\src\opal\tests\OpalTests\maple.sql.cs";

    var sw = Stopwatch.StartNew();
    for (var i = 0; i < 10000; i++)
    {
        var text = File.ReadAllText(inFilePath);
        var buffer = new StringBuffer(text);
        var scanner = new Scanner(buffer);
        while (scanner.NextToken().State != TokenStates.Empty)
        {
        }
    }
    TestContext.WriteLine($"ReadAll: {sw.Elapsed}");
    //var elapsed = sw.Elapsed;

    sw = Stopwatch.StartNew();
    for (var i = 0; i < 10000; i++)
    {
        using var buffer = new FileBuffer(inFilePath);
        var scanner = new Scanner(buffer);
        while (scanner.NextToken().State != TokenStates.Empty)
        {
        }
    }
    TestContext.WriteLine($"FileBuffer: {sw.Elapsed}");
}

public void ReadFrom_BeyondEndOfFile_Throws()
{
    var cnid = new CatalogNodeId(1);

    var extentsOverflow = new Mock<BTree<ExtentKey>>(MockBehavior.Strict);
    extentsOverflow
        .Setup(e => e.Find(new ExtentKey(cnid, 0, false)))
        .Returns((byte[])null);

    var buffer = new FileBuffer(
        new Context()
        {
            VolumeStream = Stream.Null,
            VolumeHeader = new VolumeHeader()
            {
                BlockSize = 8,
            },
            ExtentsOverflow = extentsOverflow.Object,
        },
        new ForkData()
        {
            LogicalSize = 0x123,
            Extents = Array.Empty<ExtentDescriptor>(),
            TotalBlocks = 0,
        },
        cnid);

    byte[] data = new byte[0x10];
    Assert.Throws<InvalidOperationException>(() => buffer.Read(0, data, 0, data.Length));
}

public FileWrite(FileStatus status, int count, long position, File file, FileBuffer buffer)
{
    this.status = status;
    this.count = count;
    this.position = position;
    this.file = file;
    this.buffer = buffer;
}

public override ImportResult Import()
{
    using (var fileBuffer = new FileBuffer(DbfFilename, FileBuffer.BufferType.Read))
    {
        dbfRegistry = new DbfRegistryController(fileBuffer.BufferFilename);
        return base.Import();
    }
}

public void Write_Throws()
{
    var buffer = new FileBuffer(
        new Context(),
        new ForkData(),
        new CatalogNodeId(1));

    Assert.Throws<NotSupportedException>(() => buffer.Write(1, Array.Empty<byte>(), 0, 1));
}

public void SetCapacity_Throws()
{
    var buffer = new FileBuffer(
        new Context(),
        new ForkData(),
        new CatalogNodeId(1));

    Assert.Throws<NotSupportedException>(() => buffer.SetCapacity(1));
}

public RepositoryViewReadRoutine(RepositoryViewCache cache, int piece, FileBuffer buffer, RepositoryViewReadCallback callback)
{
    this.piece = piece;
    this.buffer = buffer;
    this.callback = callback;
    this.entries = cache.Find(piece);
    this.offset = piece * (long)cache.PieceSize;
}

public RepositoryViewWriteRoutine(RepositoryViewCache cache, int piece, int block, FileBuffer buffer, RepositoryViewWriteCallback callback)
{
    this.piece = piece;
    this.block = block;
    this.buffer = buffer;
    this.callback = callback;
    this.entries = cache.Find(piece, block, 1);
    this.offset = piece * (long)cache.PieceSize + block * cache.BlockSize;
}

protected override ExportResult TryExportItems(IEnumerable<RepairPayment> repairPayments)
{
    if (repairPayments == null)
    {
        return new ExportResult();
    }

    var paymentsForExport = repairPayments
        .Where(repairPayment => repairPayment != null)
        .Select(repairPayment => new DbfGarbageOrRepairPayment(
            repairPayment.FinancialPeriodCode,
            repairPayment.CreateDate,
            repairPayment.CreateDate,
            repairPayment.FilialCode,
            repairPayment.OrganizationCode,
            repairPayment.CustomerNumber,
            Utils.RubToCopeck(repairPayment.Cost)));

    var countItemsForExport = paymentsForExport.Count();
    if (countItemsForExport == 0)
    {
        return new ExportResult();
    }

    var dbfFilename = Path.Combine(Config.OutputDirectory,
        string.Format(Config.RepairPaymentsDbfOutputFileFormat, DateTime.Now));

    Exception exportException = null;
    try
    {
        using (var fileBuffer = new FileBuffer(dbfFilename, FileBuffer.BufferType.Create))
        {
            try
            {
                var dbfRegistry = new DbfRegistryController(fileBuffer.BufferFilename);
                dbfRegistry.StoreGarbageCollectionPayments(paymentsForExport);
            }
            catch (Exception ex)
            {
                exportException = ex;
            }
        }
    }
    finally
    {
        if (exportException != null)
        {
            throw exportException;
        }
    }

    return new ExportResult(countItemsForExport, repairPayments.Count() - countItemsForExport);
}

public void Read(int piece)
{
    int offset = piece * 16384;
    FileHash hash = context.Parameters.Hash;
    FileBuffer buffer = new FileBuffer(16384);

    file.Read(offset, buffer, result =>
    {
        context.Queue.Add(new MetafileTaskRead(hash, piece, result));
    });
}

public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    int blockSize = context.Metainfo.Properties.BlockSize;
    RepositoryMemoryBlock block = context.Dependencies.Memory.Allocate(blockSize);
    FileBuffer buffer = new FileBuffer(block.Data, 0, blockSize);

    context.View.Read(buffer, index.Piece.Index, index.Offset / blockSize, result =>
    {
        context.Queue.Add(new Complete(index, result));
    });
}

public FileIconTitle(FileBuffer fileBuffer) : base(false, 0)
{
    this.fileBuffer = fileBuffer;

    icon = new ImageFileIcon(fileBuffer) { Visible = true };
    title = new LabelFileTitle(fileBuffer) { Visible = true };

    PackStart(icon, false, true, 0);
    PackStart(title, false, true, 0);
}

public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    data.With((buffer, offset, count) =>
    {
        int blockSize = context.Metainfo.Properties.BlockSize;
        FileBuffer file = new FileBuffer(buffer, offset, count);

        context.View.Write(file, index.Piece.Index, index.Offset / blockSize, args =>
        {
            context.Queue.Add(new Complete(index, data));
        });
    });
}

public void LoadFile()
{
    FileBuffer fb = new FileBuffer("test1.bin");
    byte[] baExpect = { 0x3c, 0x13, 0xe3, 0x36, 0xcc, 0x66, 0x21, 0xda };
    byte[] ba = new byte[baExpect.Length];

    fb.Read(ba, 0, 0x3f0, baExpect.Length);
    Assert.IsNotNull(ba, "#1");
    for (int i = 0; i < baExpect.Length; i++)
    {
        Assert.AreEqual(baExpect[i], ba[i]);
    }
}

public void BasicProperties_Work()
{
    var buffer = new FileBuffer(
        new Context(),
        new ForkData()
        {
            LogicalSize = 0x123,
        },
        new CatalogNodeId(1));

    Assert.True(buffer.CanRead);
    Assert.False(buffer.CanWrite);
    Assert.Equal(0x123, buffer.Capacity);
}

public void Execute()
{
    long position = offset - entries[0].Start;
    long count = entries[0].Size - position;

    if (count >= buffer.Count)
    {
        count = buffer.Count;
    }

    Receiver receiver = new Receiver(this);
    FileBuffer data = new FileBuffer(buffer.Data, buffer.Offset, (int)count);

    entries[0].File.Write(position, data, receiver.OnCompleted);
}

public void IndexerAccessTest()
{
    FileBuffer fb = new FileBuffer("test1.bin");
    Assert.IsNotNull(fb, "#1");

    long size = fb.Size;
    int sum = 0;
    for (int i = 0; i < size; i++)
    {
        sum ^= fb[i];
    }
    Assert.AreEqual(0x88, sum);
}

public void ReadFromOverflow_Works()
{
    var cnid = new CatalogNodeId(1);

    var descriptor = new ExtentDescriptor() { BlockCount = 1, StartBlock = 0 };
    byte[] descriptorBytes = new byte[2 * descriptor.Size];
    descriptor.WriteTo(descriptorBytes, 0);

    descriptor.StartBlock = 1;
    descriptor.WriteTo(descriptorBytes, descriptor.Size);

    var extentsOverflow = new Mock<BTree<ExtentKey>>(MockBehavior.Strict);
    extentsOverflow
        .Setup(e => e.Find(new ExtentKey(cnid, 0, false)))
        .Returns(descriptorBytes);

    var buffer = new FileBuffer(
        new Context()
        {
            VolumeStream = SparseStream.FromStream(
                new MemoryStream(Encoding.UTF8.GetBytes("Hello, World!")),
                Ownership.Dispose),
            VolumeHeader = new VolumeHeader()
            {
                BlockSize = 8,
            },
            ExtentsOverflow = extentsOverflow.Object,
        },
        new ForkData()
        {
            LogicalSize = 0x123,
            Extents = Array.Empty<ExtentDescriptor>(),
            TotalBlocks = 2,
        },
        cnid);

    byte[] data = new byte[0x10];
    Assert.Equal(8, buffer.Read(0, data, 0, data.Length));
    Assert.Equal(5, buffer.Read(8, data, 8, data.Length));
}

public void Execute(RepositoryContext context, RepositoryTaskCallback onCompleted)
{
    int blocksInBuffer = block.Length / context.Metainfo.Properties.BlockSize;
    int blocksInPiece = context.Metainfo.Properties.PieceSize / context.Metainfo.Properties.BlockSize;
    int step = Math.Min(blocksInBuffer, blocksInPiece);

    FileBuffer buffer = new FileBuffer(block.Data, 0, step * context.Metainfo.Properties.BlockSize);

    context.View.Read(buffer, piece, 0, args =>
    {
        if (args.Count > 0 && context.View.Exists(args.Piece, args.Block + step))
        {
            context.Queue.Add(new Continue(bitfield, scope, algorithm, args, block));
        }
        else
        {
            context.Queue.Add(new Complete(bitfield, scope, algorithm, args, block));
        }
    });
}

public void GetExtentsInRange_Works()
{
    var cnid = new CatalogNodeId(1);

    var extentsOverflow = new Mock<BTree<ExtentKey>>(MockBehavior.Strict);
    extentsOverflow
        .Setup(e => e.Find(new ExtentKey(cnid, 0, false)))
        .Returns((byte[])null);

    var buffer = new FileBuffer(
        new Context()
        {
            VolumeStream = Stream.Null,
            VolumeHeader = new VolumeHeader()
            {
                BlockSize = 8,
            },
            ExtentsOverflow = extentsOverflow.Object,
        },
        new ForkData()
        {
            LogicalSize = 0x10,
            Extents = Array.Empty<ExtentDescriptor>(),
            TotalBlocks = 0,
        },
        cnid);

    var extent = Assert.Single(buffer.GetExtentsInRange(8, 8));
    Assert.Equal(8, extent.Start);
    Assert.Equal(8, extent.Length);

    extent = Assert.Single(buffer.GetExtentsInRange(8, 16));
    Assert.Equal(8, extent.Start);
    Assert.Equal(8, extent.Length);

    extent = Assert.Single(buffer.GetExtentsInRange(16, 16));
    Assert.Equal(16, extent.Start);
    Assert.Equal(0, extent.Length);
}

public ForwardOutputPlugin(ForwardOutputConfig config, ILoggerFactory loggerFactory)
{
    _config = config;
    _fileBuffer = new FileBuffer(this, config.BufferSettings, loggerFactory);
    _updateServersTimer = new Timer
    {
        AutoReset = true,
        Interval = 10000,
        Enabled = true
    };
    _updateServersTimer.Elapsed += OnUpdateServersTimerElapsed;
    _logger = loggerFactory.CreateLogger(typeof(ForwardOutputPlugin));

    foreach (var server in config.Servers)
    {
        _workers.Add(new ForwardWorker(this, server));
    }

    UpdateServers();
}

public void TestBuffer()
{
    var stream = new MemoryStream();
    var writer = new StreamWriter(stream);
    writer.Write("44+4+4");
    writer.Flush();
    stream.Seek(0, SeekOrigin.Begin);

    var buffer = new FileBuffer(stream);
    var scanner = new Scanner(buffer);

    var token = scanner.NextToken();
    Assert.AreEqual("44", token.Value);

    token = scanner.NextToken();
    Assert.AreEqual("+", token.Value);

    token = scanner.NextToken();
    Assert.AreEqual("4", token.Value);

    token = scanner.NextToken();
    Assert.AreEqual("+", token.Value);

    token = scanner.NextToken();
    Assert.AreEqual("4", token.Value);

    token = scanner.NextToken();
    Assert.AreEqual(string.Empty, token.Value);
}

static void HttpParserExample()
{
    SocketServer socketServer = new SocketServer();
    socketServer.Init();

    FileBuffer FB = FileBuffer.GetInstance();
    FB.Run();

    if (socketServer.Start() == false)
    {
        Console.WriteLine("Start failed");
    }
    socketServer.OnDataReceived += new SocketServer.ConnetionChangedEventHandler(OnReceivedHttpReq);

    Console.WriteLine("Press any key to STOP the server process....");
    Console.ReadKey();
    socketServer.Stop();
    FB.Stop();

    Console.WriteLine("Press any key to RESTART the server process....");
    Console.ReadKey();
    FB.Run();
    socketServer.Start();

    Console.WriteLine("Press any key to terminate the server process....");
    Console.ReadKey();
}

public ImageFileIcon(FileBuffer fileBuffer) : base(Gtk.Stock.File, Gtk.IconSize.Menu)
{
    this.fileBuffer = fileBuffer;
}

// "열기" 이벤트 private void button3_Click(object sender, EventArgs e) { openFileDialog1.Filter = "*.pack|*.pack"; openFileDialog1.FileName = ""; if (openFileDialog1.ShowDialog() == System.Windows.Forms.DialogResult.OK) { m_files.Clear(); using (FileStream fs = new FileStream(openFileDialog1.FileName, FileMode.Open)) { using (BinaryReader br = new BinaryReader(fs)) { int totalFileCount = br.ReadInt32(); for( int i = 0 ; i < totalFileCount ; ++i ) { FileBuffer file = new FileBuffer(); int nameLength = br.ReadInt32(); file.Name = br.ReadString(); file.FileSize = br.ReadInt32(); file.buffer = br.ReadBytes(file.FileSize); m_files.Add(file); } } } listBox1.Items.Clear(); foreach (FileBuffer file in m_files) { listBox1.Items.Add(file.Name); } } }
public void Load(Stream stream)
{
    _buffer = new FileBuffer(stream);
}

protected override ExportResult TryExportItems(IEnumerable<WaterCustomerPayment> waterCustomerPayments)
{
    if (waterCustomerPayments == null)
    {
        return new ExportResult();
    }

    var paymentsForExport = waterCustomerPayments
        .Where(waterCustomerPayment => waterCustomerPayment != null)
        .Select(waterCustomerPayment => new DbfWaterCustomerPayment(
            waterCustomerPayment.CreateDate,
            waterCustomerPayment.WaterCustomer.Number,
            waterCustomerPayment.Cost,
            waterCustomerPayment.CreateDate.ToString("yyyyMM"),
            waterCustomerPayment.Penalty,
            waterCustomerPayment.WaterCustomer.CounterNumber1, 0, waterCustomerPayment.CounterValue1,
            waterCustomerPayment.WaterCustomer.CounterNumber2, 0, waterCustomerPayment.CounterValue2,
            waterCustomerPayment.WaterCustomer.CounterNumber3, 0, waterCustomerPayment.CounterValue3,
            waterCustomerPayment.WaterCustomer.CounterNumber4, 0, waterCustomerPayment.CounterValue4));

    var countItemsForExport = paymentsForExport.Count();
    if (countItemsForExport == 0)
    {
        return new ExportResult();
    }

    var dbfFilename = Path.Combine(Config.OutputDirectory,
        string.Format(Config.WaterCustomerDbfOutputFileFormat, DateTime.Now));

    Exception exportException = null;
    try
    {
        using (var fileBuffer = new FileBuffer(dbfFilename, FileBuffer.BufferType.Create))
        {
            try
            {
                var dbfRegistry = new DbfRegistryController(fileBuffer.BufferFilename);
                dbfRegistry.StoreWaterCustomerPayments(paymentsForExport);
            }
            catch (Exception ex)
            {
                exportException = ex;
            }
        }
    }
    finally
    {
        if (exportException != null)
        {
            throw exportException;
        }
    }

    return new ExportResult(countItemsForExport, waterCustomerPayments.Count() - countItemsForExport);
}

public void Write(FileBuffer buffer, int piece, int block, RepositoryViewWriteCallback callback)
{
    new RepositoryViewWriteRoutine(cache, piece, block, buffer, callback).Execute();
}

public void Read(FileBuffer buffer, int piece, RepositoryViewReadCallback callback)
{
    new RepositoryViewReadRoutine(cache, piece, buffer, callback).Execute();
}