/// <summary>
/// Writes raw random bytes (which lack the DataWriter chunk layout) to a
/// test file and verifies that DataVerifier finds zero matching chunks
/// when reading it back.
/// </summary>
public virtual void TestBadChunks()
{
    FilePath fn = GetTestFile();
    int byteAm = 10000;
    // Fill the file with random data that was never produced by DataWriter,
    // so no chunk can pass verification.
    FileOutputStream fout = new FileOutputStream(fn);
    try
    {
        byte[] bytes = new byte[byteAm];
        rnd.NextBytes(bytes);
        fout.Write(bytes);
    }
    finally
    {
        // Close even if the write throws so the file handle is not leaked.
        fout.Close();
    }
    // Attempt to read it back. Verification is expected to fail (or find no
    // valid chunks) for garbage input; vout keeps its all-zero default if
    // VerifyFile throws before returning.
    DataVerifier vf = new DataVerifier();
    DataVerifier.VerifyOutput vout = new DataVerifier.VerifyOutput(0, 0, 0, 0);
    DataInputStream @in = null;
    try
    {
        @in = new DataInputStream(new FileInputStream(fn));
        vout = vf.VerifyFile(byteAm, @in);
    }
    catch (Exception e)
    {
        // A verification error is expected for malformed data; log it rather
        // than swallowing silently so unexpected failures remain visible.
        Log.Warn("Expected verification error for bad chunks", e);
    }
    finally
    {
        if (@in != null)
        {
            @in.Close();
        }
    }
    // No chunk of random data should match the expected chunk format.
    NUnit.Framework.Assert.AreEqual(0, vout.GetChunksSame());
}
/// <summary>
/// Writes a known number of bytes with DataWriter and verifies that
/// DataVerifier reads back exactly that byte count with no differing
/// chunks.
/// </summary>
public virtual void TestDataWriting()
{
    long byteAm = 100;
    FilePath fn = GetTestFile();
    DataWriter writer = new DataWriter(rnd);
    FileOutputStream fs = new FileOutputStream(fn);
    DataWriter.GenerateOutput ostat;
    try
    {
        ostat = writer.WriteSegment(byteAm, fs);
        Log.Info(ostat);
    }
    finally
    {
        // Release the output stream even if the write fails.
        fs.Close();
    }
    NUnit.Framework.Assert.AreEqual(byteAm, ostat.GetBytesWritten());
    DataVerifier vf = new DataVerifier();
    FileInputStream fin = new FileInputStream(fn);
    DataVerifier.VerifyOutput vfout;
    try
    {
        vfout = vf.VerifyFile(byteAm, new DataInputStream(fin));
        Log.Info(vfout);
    }
    finally
    {
        // Release the input stream even if verification fails.
        fin.Close();
    }
    // NUnit's AreEqual is (expected, actual); the original call had the
    // arguments reversed, which produced misleading failure messages.
    NUnit.Framework.Assert.AreEqual(byteAm, vfout.GetBytesRead());
    NUnit.Framework.Assert.AreEqual(0, vfout.GetChunksDifferent());
}
/// <summary>
/// Runs a read operation: opens the configured read file, verifies its
/// chunked contents with DataVerifier, and reports timing, byte, and
/// chunk-verification metrics as OperationOutput entries.
/// </summary>
/// <param name="fs">the filesystem to read the file from</param>
/// <returns>the base outputs plus timing/byte/chunk counters on success,
/// or a not-found / bad-file / failure counter on error</returns>
internal override IList<OperationOutput> Run(FileSystem fs)
{
    // Operation
    IList<OperationOutput> @out = base.Run(fs);
    DataInputStream @is = null;
    try
    {
        Path fn = GetReadFile();
        Range<long> readSizeRange = GetConfig().GetReadSize();
        long readSize = 0;
        string readStrAm = string.Empty;
        if (GetConfig().ShouldReadFullFile())
        {
            // long.MaxValue signals "read to end of file" to the verifier.
            readSize = long.MaxValue;
            readStrAm = "full file";
        }
        else
        {
            // Otherwise pick a random read size within the configured range.
            readSize = Range.BetweenPositive(GetRandom(), readSizeRange);
            readStrAm = Helper.ToByteInfo(readSize);
        }
        long timeTaken = 0;
        long chunkSame = 0;
        long chunkDiff = 0;
        long bytesRead = 0;
        long startTime = 0;
        DataVerifier vf = new DataVerifier();
        Log.Info("Attempting to read file at " + fn + " of size (" + readStrAm + ")");
        {
            // open - only the open call itself is timed here; verification
            // time is reported separately by VerifyOutput.GetReadTime().
            startTime = Timer.Now();
            @is = fs.Open(fn);
            timeTaken += Timer.Elapsed(startTime);
            // read & verify
            DataVerifier.VerifyOutput vo = vf.VerifyFile(readSize, @is);
            timeTaken += vo.GetReadTime();
            chunkSame += vo.GetChunksSame();
            chunkDiff += vo.GetChunksDifferent();
            bytesRead += vo.GetBytesRead();
            // capture close time
            startTime = Timer.Now();
            @is.Close();
            // Null out so the finally block does not close the stream twice.
            @is = null;
            timeTaken += Timer.Elapsed(startTime);
        }
        @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter
            .OkTimeTaken, timeTaken));
        @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter
            .BytesRead, bytesRead));
        @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter
            .Successes, 1L));
        @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter
            .ChunksVerified, chunkSame));
        @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter
            .ChunksUnverified, chunkDiff));
        Log.Info("Read " + Helper.ToByteInfo(bytesRead) + " of " + fn + " with " + chunkSame
            + " chunks being same as expected and " + chunkDiff
            + " chunks being different than expected in " + timeTaken + " milliseconds");
    }
    catch (FileNotFoundException e)
    {
        // Target file does not exist: counted as "not found", not a failure.
        @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter
            .NotFound, 1L));
        Log.Warn("Error with reading", e);
    }
    catch (BadFileException e)
    {
        // File exists but its contents did not match the expected chunk format.
        @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter
            .BadFiles, 1L));
        Log.Warn("Error reading bad file", e);
    }
    catch (IOException e)
    {
        // Any other I/O error counts as a generic failure.
        @out.AddItem(new OperationOutput(OperationOutput.OutputType.Long, GetType(), ReportWriter
            .Failures, 1L));
        Log.Warn("Error reading", e);
    }
    finally
    {
        // @is is non-null only if open or verify threw mid-way; best-effort close.
        if (@is != null)
        {
            try
            {
                @is.Close();
            }
            catch (IOException e)
            {
                Log.Warn("Error closing read stream", e);
            }
        }
    }
    return (@out);
}