/// <summary>Reads bytes into a buffer until EOF or the buffer's limit is reached</summary>
/// <returns>Total bytes read, or a negative value when EOF is hit before any byte arrives.</returns>
/// <exception cref="System.IO.IOException"/>
private static int FillBuffer(FileChannel channel, ByteBuffer buf)
{
    // First read: a negative result means the channel was already at EOF,
    // and that negative value is propagated to the caller unchanged.
    int total = channel.Read(buf);
    if (total < 0)
    {
        return total;
    }
    // Keep pulling data until the buffer has no room left or the channel
    // reports EOF; on EOF we return whatever was accumulated so far.
    while (buf.Remaining() > 0)
    {
        int chunk = channel.Read(buf);
        if (chunk < 0)
        {
            return total;
        }
        total += chunk;
    }
    return total;
}
/// <summary>
/// Transfers up to (count - position) bytes from the backing file channel to
/// <paramref name="target"/>, staging the data through a heap buffer of
/// shuffleBufferSize bytes.
/// </summary>
/// <param name="target">channel that receives the bytes.</param>
/// <param name="position">offset into this region (0-based, relative to this.position).</param>
/// <returns>the number of bytes actually written to the target.</returns>
internal virtual long CustomShuffleTransfer(WritableByteChannel target, long position
    )
{
    long actualCount = this.count - position;
    if (actualCount < 0 || position < 0)
    {
        throw new ArgumentException("position out of range: " + position + " (expected: 0 - "
             + (this.count - 1) + ')');
    }
    if (actualCount == 0)
    {
        return(0L);
    }
    // trans counts the bytes still owed to the target.
    long trans = actualCount;
    int readSize;
    ByteBuffer byteBuffer = ByteBuffer.Allocate(this.shuffleBufferSize);
    // Positional read: this.position + position never disturbs the channel's own cursor.
    while (trans > 0L && (readSize = fileChannel.Read(byteBuffer, this.position + position
        )) > 0)
    {
        //adjust counters and buffer limit
        if (readSize < trans)
        {
            trans -= readSize;
            position += readSize;
            byteBuffer.Flip();
        }
        else
        {
            //We can read more than we need if the actualCount is not multiple
            //of the byteBuffer size and file is big enough. In that case we cannot
            //use flip method but we need to set buffer limit manually to trans.
            byteBuffer.Limit((int)trans);
            byteBuffer.Position(0);
            position += trans;
            trans = 0;
        }
        //write data to the target; a channel may accept fewer bytes per call,
        //so loop until the buffer is drained.
        while (byteBuffer.HasRemaining())
        {
            target.Write(byteBuffer);
        }
        byteBuffer.Clear();
    }
    // Bytes delivered = bytes requested minus bytes still owed (short if EOF hit early).
    return(actualCount - trans);
}
/// <summary>Read the cache file into memory.</summary>
/// <remarks>
/// Read the cache file into memory. The whole file is loaded with a single
/// channel read into a direct big-endian buffer, the header is parsed, and
/// every entry is inserted into the entry map; duplicate names accumulate
/// their stage bits.
/// </remarks>
/// <exception cref="System.IO.IOException">System.IO.IOException</exception>
public virtual void Read()
{
    changed = false;
    statDirty = false;
    // No cache file on disk simply means an empty index.
    if (!cacheFile.Exists())
    {
        header = null;
        entries.Clear();
        lastCacheTime = 0;
        return;
    }
    cache = new RandomAccessFile(cacheFile, "r");
    try
    {
        FileChannel channel = cache.GetChannel();
        ByteBuffer buf = ByteBuffer.AllocateDirect((int)cacheFile.Length());
        buf.Order(ByteOrder.BIG_ENDIAN);
        int bytesRead = channel.Read(buf);
        if (bytesRead != buf.Capacity())
        {
            // The parser relies on one read covering the entire file.
            throw new IOException(MessageFormat.Format(JGitText.Get().couldNotReadIndexInOneGo
                , bytesRead, buf.Capacity()));
        }
        buf.Flip();
        header = new GitIndex.Header(buf);
        entries.Clear();
        for (int n = 0; n < header.entries; ++n)
        {
            GitIndex.Entry parsed = new GitIndex.Entry(this, buf);
            GitIndex.Entry prior = entries.Get(parsed.name);
            entries.Put(parsed.name, parsed);
            if (prior != null)
            {
                // Same path seen earlier: merge its stage flags into the new entry.
                parsed.stages |= prior.stages;
            }
        }
        lastCacheTime = cacheFile.LastModified();
    }
    finally
    {
        cache.Close();
    }
}
/// <summary>Read the header without changing the position of the FileChannel.</summary>
/// <param name="fc">The FileChannel to read.</param>
/// <returns>the Metadata Header.</returns>
/// <exception cref="System.IO.IOException">on error.</exception>
public static Org.Apache.Hadoop.Hdfs.Server.Datanode.BlockMetadataHeader PreadHeader
    (FileChannel fc)
{
    byte[] arr = new byte[GetHeaderSize()];
    ByteBuffer buf = ByteBuffer.Wrap(arr);
    // Positional reads leave the channel's own position untouched.
    // BUGFIX: read from buf.Position() instead of a hard-coded 0 — the buffer
    // position equals the number of header bytes already consumed, so a short
    // read resumes at the right file offset instead of copying the first bytes
    // of the file into the middle of the buffer.
    while (buf.HasRemaining())
    {
        if (fc.Read(buf, buf.Position()) <= 0)
        {
            throw new EOFException("unexpected EOF while reading " + "metadata file header");
        }
    }
    // Header layout: bytes [0..1] = big-endian version, bytes [2..] = checksum descriptor.
    short version = (short)((arr[0] << 8) | (arr[1] & unchecked ((int)(0xff))));
    DataChecksum dataChecksum = DataChecksum.NewDataChecksum(arr, 2);
    return(new Org.Apache.Hadoop.Hdfs.Server.Datanode.BlockMetadataHeader(version, dataChecksum
        ));
}
/// <summary>Read from the block file into a buffer.</summary>
/// <remarks>
/// Read from the block file into a buffer.
/// This function overwrites checksumBuf. It will increment dataPos.
/// </remarks>
/// <param name="buf">
/// The buffer to read into. May be dataBuf.
/// The position and limit of this buffer should be set to
/// multiples of the checksum size.
/// </param>
/// <param name="canSkipChecksum">True if we can skip checksumming.</param>
/// <returns>Total bytes read. 0 on EOF.</returns>
/// <exception cref="System.IO.IOException"/>
private int FillBuffer(ByteBuffer buf, bool canSkipChecksum)
{
    // NOTE(review): locks on 'this' (translation of Java 'synchronized') —
    // external code holding this reference could contend on the same monitor.
    lock (this)
    {
        TraceScope scope = Trace.StartSpan("BlockReaderLocal#fillBuffer(" + block.GetBlockId
            () + ")", Sampler.Never);
        try
        {
            int total = 0;
            long startDataPos = dataPos;
            int startBufPos = buf.Position();
            // Fill buf with positional reads until it is full or the block file ends.
            while (buf.HasRemaining())
            {
                int nRead = dataIn.Read(buf, dataPos);
                if (nRead < 0)
                {
                    break;
                }
                dataPos += nRead;
                total += nRead;
            }
            if (canSkipChecksum)
            {
                FreeChecksumBufIfExists();
                return(total);
            }
            if (total > 0)
            {
                try
                {
                    // Rewind buf to cover exactly the bytes just read so the
                    // checksum verifier sees [startBufPos, startBufPos + total).
                    buf.Limit(buf.Position());
                    buf.Position(startBufPos);
                    CreateChecksumBufIfNeeded();
                    // One checksum per bytesPerChecksum-sized chunk, rounding up
                    // to cover a trailing partial chunk.
                    int checksumsNeeded = (total + bytesPerChecksum - 1) / bytesPerChecksum;
                    checksumBuf.Clear();
                    checksumBuf.Limit(checksumsNeeded * checksumSize);
                    // Checksums live after the metadata header, indexed by chunk number.
                    long checksumPos = BlockMetadataHeader.GetHeaderSize() + ((startDataPos / bytesPerChecksum
                        ) * checksumSize);
                    while (checksumBuf.HasRemaining())
                    {
                        int nRead = checksumIn.Read(checksumBuf, checksumPos);
                        if (nRead < 0)
                        {
                            throw new IOException("Got unexpected checksum file EOF at " + checksumPos + ", block file position "
                                 + startDataPos + " for " + "block " + block + " of file " + filename);
                        }
                        checksumPos += nRead;
                    }
                    checksumBuf.Flip();
                    checksum.VerifyChunkedSums(buf, checksumBuf, filename, startDataPos);
                }
                finally
                {
                    // Restore buf's position to its limit so the caller sees the
                    // buffer in the same "just filled" state whether or not
                    // verification threw.
                    buf.Position(buf.Limit());
                }
            }
            return(total);
        }
        finally
        {
            scope.Close();
        }
    }
}
/// <summary>
/// Loads the dictionary database into memory. Copies dict.db/idx.db out of the
/// app assets on first run, slices idx.db into ~200KB parts aligned on entry
/// boundaries, and either rebuilds the entry/part offset tables (saving them to
/// entries.bin/parts.bin) or reloads those tables from disk.
/// </summary>
public static void LoadDict(Context context)
{
    try
    {
        bool resaveEntries = false;
        dictParts = new List<byte[]>();
        dictIndexes = new List<int>();
        File dictFd = new File(context.FilesDir, "dict.db");
        if (!dictFd.Exists())
        {
            // || dictFd.length() != 4961308) {  -- disabled size sanity check
            System.Console.WriteLine("DOES NOT EXIST!!!!!");
            // First run: copy the bundled database out of the assets.
            CopyFile(context, "dict.db");
            dictFd = new File(context.FilesDir, "dict.db");
            resaveEntries = true;
        }
        dictFile = new RandomAccessFile(dictFd, "r");
        File idxFd = new File(context.FilesDir, "idx.db");
        if (!idxFd.Exists())
        {
            // || idxFd.length() != 3145553) {  -- disabled size sanity check
            CopyFile(context, "idx.db");
            idxFd = new File(context.FilesDir, "idx.db");
            resaveEntries = true;
        }
        FileInputStream idxBuf = new FileInputStream(idxFd);
        // If either cached table is missing we must rebuild both.
        if (!new File(context.FilesDir, "entries.bin").Exists() || !new File(context.FilesDir, "parts.bin").Exists())
        {
            resaveEntries = true;
        }
        // 1649830 = total number of index entries; presumably fixed by the
        // shipped idx.db — TODO confirm against the asset build.
        entries = IntBuffer.Allocate(1649830);
        int index = 0;
        if (idxBuf != null)
        {
            // NOTE(review): idxBuf can never be null here; the guard is dead.
            int readLen, offset = 0, partLen = 200000;
            byte[] dictPart = new byte[partLen];
            int totalRead = 0;
            int totalLen = (int)idxFd.Length();
            // Read idx.db in partLen-sized slices; 'offset' carries over the tail
            // bytes of an entry that straddles a slice boundary.
            while (totalRead < totalLen && (readLen = idxBuf.Read(dictPart, offset, dictPart.Length - offset)) > 0)
            {
                totalRead += readLen;
                int j = offset + readLen - 1;
                byte[] newDictPart = null;
                if (readLen == partLen - offset)
                {
                    // Slice is full: back up j to the last complete entry boundary.
                    // Byte-sign scan: positive then negative runs appear to delimit
                    // an entry record — TODO confirm the idx.db record format.
                    while (dictPart[j] > 0)
                    {
                        j--;
                    }
                    while (dictPart[j] < 0)
                    {
                        System.Console.WriteLine("j = " + j);
                        j--;
                    }
                    // Bytes after j belong to the next slice.
                    offset = partLen - j - 1;
                    newDictPart = new byte[Math.Min(totalLen - totalRead + offset, partLen)];
                    // Carry the partial entry into the front of the next slice.
                    Java.Lang.JavaSystem.Arraycopy(dictPart, j + 1, newDictPart, 0, offset);
                }
                else
                {
                    // Final (short) slice: everything read is complete.
                    offset = 0;
                }
                if (resaveEntries)
                {
                    // Record where this part starts in the global entry numbering.
                    dictIndexes.Add(index);
                    int i = 0;
                    while (i <= j)
                    {
                        entries.Put(index++, i);
                        // Skip one negative-byte run then one non-negative run —
                        // i.e. advance over a single entry record.
                        while (i <= j && dictPart[i] < 0)
                        {
                            i++;
                        }
                        while (i <= j && dictPart[i] >= 0)
                        {
                            i++;
                        }
                    }
                }
                dictParts.Add(dictPart);
                dictPart = newDictPart;
            }
            idxBuf.Close();
        }
        if (resaveEntries)
        {
            // Persist the freshly built tables so future launches can skip the scan.
            DataOutputStream entriesOut = null, partsOut = null;
            entriesOut = new DataOutputStream(context.OpenFileOutput("entries.bin", FileCreationMode.Private));
            int count = entries.Capacity();
            for (int i = 0; i < count; i++)
            {
                entriesOut.WriteInt(entries.Get(i));
            }
            partsOut = new DataOutputStream(context.OpenFileOutput("parts.bin", FileCreationMode.Private));
            foreach (int i in dictIndexes)
            {
                partsOut.WriteInt(i);
            }
            if (entriesOut != null)
            {
                entriesOut.Flush();
                entriesOut.Close();
            }
            if (partsOut != null)
            {
                partsOut.Flush();
                partsOut.Close();
            }
        }
        else
        {
            // Fast path: reload the cached tables.
            // NOTE(review): reads from the Personal folder, while the rebuild
            // branch writes via OpenFileOutput — verify both resolve to the
            // same directory on-device.
            string documentpath = System.Environment.GetFolderPath(System.Environment.SpecialFolder.Personal);
            Java.IO.File sdpath = global::Android.OS.Environment.GetExternalStoragePublicDirectory(global::Android.OS.Environment.DirectoryDownloads);
            File entriesDB = new File(documentpath, "entries.bin");
            File partsDB = new File(documentpath, "parts.bin");
            FileInputStream entriesIn = null, partsIn = null;
            entriesIn = new FileInputStream(entriesDB);
            FileChannel file = entriesIn.Channel;
            // 4 bytes per int; same 1649830 entry count as above.
            ByteBuffer bb = ByteBuffer.Allocate(4 * 1649830);
            file.Read(bb);
            bb.Rewind();
            entries = bb.AsIntBuffer();
            file.Close();
            partsIn = new FileInputStream(partsDB);
            file = partsIn.Channel;
            bb = ByteBuffer.Allocate((int)file.Size());
            file.Read(bb);
            bb.Rewind();
            IntBuffer ib = bb.AsIntBuffer();
            int count = ib.Capacity();
            for (int i = 0; i < count; i++)
            {
                dictIndexes.Add(ib.Get(i));
            }
            file.Close();
            if (entriesIn != null)
            {
                entriesIn.Close();
            }
            if (partsIn != null)
            {
                partsIn.Close();
            }
        }
    }
    catch (Exception e)
    {
        System.Console.WriteLine("Dict LoadDict ERROR => " + e.Message);
        // NOTE(review): Log.Equals is object.Equals, not a logging call —
        // this line logs nothing; was a Log.E/Log.Error call intended?
        Log.Equals("chinesreader", e.Message);
    }
    byteBuffer = new byte[1090];
    sharedPrefs = PreferenceManager.GetDefaultSharedPreferences(context);
}
/// <summary>
/// Verifies an HDFS block file against its checksum (meta) file chunk by chunk.
/// With only -meta, just prints the checksum type from the header.
/// </summary>
/// <param name="args">remaining command-line tokens; "-meta FILE" is required,
/// "-block FILE" is optional.</param>
/// <returns>0 on success, 1 on usage or I/O error.</returns>
/// <exception cref="System.IO.IOException"/>
internal override int Run(IList<string> args)
{
    if (args.Count == 0)
    {
        System.Console.Out.WriteLine(this.usageText);
        System.Console.Out.WriteLine(this.helpText + "\n");
        return(1);
    }
    string blockFile = StringUtils.PopOptionWithArgument("-block", args);
    string metaFile = StringUtils.PopOptionWithArgument("-meta", args);
    if (metaFile == null)
    {
        System.Console.Error.WriteLine("You must specify a meta file with -meta");
        return(1);
    }
    FileInputStream metaStream = null;
    FileInputStream dataStream = null;
    FileChannel metaChannel = null;
    FileChannel dataChannel = null;
    DataInputStream checksumStream = null;
    try
    {
        BlockMetadataHeader header;
        try
        {
            metaStream = new FileInputStream(metaFile);
            checksumStream = new DataInputStream(metaStream);
            header = BlockMetadataHeader.ReadHeader(checksumStream);
            // Skip the header so the channel is positioned at the first checksum.
            metaChannel = metaStream.GetChannel();
            metaChannel.Position(DebugAdmin.HeaderLen);
        }
        catch (RuntimeException e)
        {
            System.Console.Error.WriteLine("Failed to read HDFS metadata file header for " +
                metaFile + ": " + StringUtils.StringifyException(e));
            return(1);
        }
        catch (IOException e)
        {
            System.Console.Error.WriteLine("Failed to read HDFS metadata file header for " +
                metaFile + ": " + StringUtils.StringifyException(e));
            return(1);
        }
        DataChecksum checksum = header.GetChecksum();
        System.Console.Out.WriteLine("Checksum type: " + checksum.ToString());
        if (blockFile == null)
        {
            // Header-only mode: nothing left to verify.
            return(0);
        }
        ByteBuffer metaBuf;
        ByteBuffer dataBuf;
        try
        {
            dataStream = new FileInputStream(blockFile);
            dataChannel = dataStream.GetChannel();
            // Size both buffers to hold the same number of chunks per iteration.
            int ChecksumsPerBuf = 1024 * 32;
            metaBuf = ByteBuffer.Allocate(checksum.GetChecksumSize() * ChecksumsPerBuf);
            dataBuf = ByteBuffer.Allocate(checksum.GetBytesPerChecksum() * ChecksumsPerBuf);
        }
        catch (IOException e)
        {
            System.Console.Error.WriteLine("Failed to open HDFS block file for " + blockFile
                + ": " + StringUtils.StringifyException(e));
            return(1);
        }
        long offset = 0;
        while (true)
        {
            dataBuf.Clear();
            int dataRead = -1;
            try
            {
                dataRead = dataChannel.Read(dataBuf);
                if (dataRead < 0)
                {
                    // EOF on the block file: verification finished.
                    break;
                }
            }
            catch (IOException e)
            {
                // BUGFIX: added the space before "from" so the message does not
                // run the file name into the rest of the sentence.
                System.Console.Error.WriteLine("Got I/O error reading block file " + blockFile +
                    " from disk at offset " + dataChannel.Position() + ": " + StringUtils.StringifyException
                    (e));
                return(1);
            }
            try
            {
                // Checksums needed for dataRead bytes, rounding up for a partial
                // trailing chunk.
                int csumToRead = (((checksum.GetBytesPerChecksum() - 1) + dataRead) / checksum.GetBytesPerChecksum
                    ()) * checksum.GetChecksumSize();
                metaBuf.Clear();
                metaBuf.Limit(csumToRead);
                // Assumes a single read fills metaBuf; a short read here would
                // surface as a verification failure below — TODO confirm upstream.
                metaChannel.Read(metaBuf);
                dataBuf.Flip();
                metaBuf.Flip();
            }
            catch (IOException e)
            {
                // BUGFIX: same missing-space fix as the block-file message above.
                System.Console.Error.WriteLine("Got I/O error reading metadata file " + metaFile +
                    " from disk at offset " + metaChannel.Position() + ": " + StringUtils.StringifyException
                    (e));
                return(1);
            }
            try
            {
                checksum.VerifyChunkedSums(dataBuf, metaBuf, blockFile, offset);
            }
            catch (IOException e)
            {
                System.Console.Out.WriteLine("verifyChunkedSums error: " + StringUtils.StringifyException
                    (e));
                return(1);
            }
            offset += dataRead;
        }
        System.Console.Out.WriteLine("Checksum verification succeeded on block file " + blockFile
            );
        return(0);
    }
    finally
    {
        IOUtils.Cleanup(null, metaStream, dataStream, checksumStream);
    }
}