protected new void OneTimeSetUp()
{
    ushort demoServerIndex = NameSpaceUris.GetIndexOrAppend(kDemoServer);
    Assume.That(demoServerIndex == kDemoServerIndex, $"Server Index: {demoServerIndex} != {kDemoServerIndex}");
}
public void FailureThrowsInconclusiveException_ActualAndConstraint() { Assume.That(2 + 2, Is.EqualTo(5)); }
public void FailureThrowsInconclusiveException_ActualAndConstraintWithMessageAndArgs() { Assume.That(2 + 2, Is.EqualTo(5), "Should be {0}", 5); }
public void AssumptionPasses_DelegateAndConstraintWithMessageAndArgs() { Assume.That(new ActualValueDelegate(ReturnsFour), Is.EqualTo(4), "Should be {0}", 4); }
public void FailureThrowsInconclusiveException_BooleanWithMessage() { Assume.That(2 + 2 == 5, "message"); }
public void ArrayAndEmptyArraySegment()
{
    Assume.That(ArraySegmentImplementsIEnumerable);
    Assert.That(new int[] { 2, 3, 4 }, Is.Not.EqualTo(new ArraySegment<int>()));
}
public void TwoEmptyArraySegments()
{
    Assume.That(ArraySegmentImplementsIEnumerable);
    Assert.That(new ArraySegment<int>(), Is.EqualTo(new ArraySegment<int>()));
}
/// <exception cref="System.IO.IOException"/>
public virtual void RunBlockReaderLocalTest(TestBlockReaderLocal.BlockReaderLocalTest test, bool checksum, long readahead)
{
    Assume.AssumeThat(DomainSocket.GetLoadingFailureReason(), CoreMatchers.EqualTo(null));
    MiniDFSCluster cluster = null;
    HdfsConfiguration conf = new HdfsConfiguration();
    conf.SetBoolean(DFSConfigKeys.DfsClientReadShortcircuitSkipChecksumKey, !checksum);
    conf.SetLong(DFSConfigKeys.DfsBytesPerChecksumKey, TestBlockReaderLocal.BlockReaderLocalTest.BytesPerChecksum);
    conf.Set(DFSConfigKeys.DfsChecksumTypeKey, "CRC32C");
    conf.SetLong(DFSConfigKeys.DfsClientCacheReadahead, readahead);
    test.SetConfiguration(conf);
    FileInputStream dataIn = null;
    FileInputStream metaIn = null;
    Path TestPath = new Path("/a");
    long RandomSeed = 4567L;
    BlockReaderLocal blockReaderLocal = null;
    FSDataInputStream fsIn = null;
    byte[] original = new byte[TestBlockReaderLocal.BlockReaderLocalTest.TestLength];
    FileSystem fs = null;
    ShortCircuitShm shm = null;
    RandomAccessFile raf = null;
    try
    {
        cluster = new MiniDFSCluster.Builder(conf).NumDataNodes(1).Build();
        cluster.WaitActive();
        fs = cluster.GetFileSystem();
        DFSTestUtil.CreateFile(fs, TestPath, TestBlockReaderLocal.BlockReaderLocalTest.TestLength, (short)1, RandomSeed);
        try
        {
            DFSTestUtil.WaitReplication(fs, TestPath, (short)1);
        }
        catch (TimeoutException e)
        {
            NUnit.Framework.Assert.Fail("unexpected TimeoutException during " + "waitReplication: " + e);
        }
        catch (Exception e)
        {
            NUnit.Framework.Assert.Fail("unexpected InterruptedException during " + "waitReplication: " + e);
        }
        fsIn = fs.Open(TestPath);
        IOUtils.ReadFully(fsIn, original, 0, TestBlockReaderLocal.BlockReaderLocalTest.TestLength);
        fsIn.Close();
        fsIn = null;
        ExtendedBlock block = DFSTestUtil.GetFirstBlock(fs, TestPath);
        FilePath dataFile = cluster.GetBlockFile(0, block);
        FilePath metaFile = cluster.GetBlockMetadataFile(0, block);
        ShortCircuitCache shortCircuitCache = ClientContext.GetFromConf(conf).GetShortCircuitCache();
        cluster.Shutdown();
        cluster = null;
        test.Setup(dataFile, checksum);
        FileInputStream[] streams = new FileInputStream[] { new FileInputStream(dataFile), new FileInputStream(metaFile) };
        dataIn = streams[0];
        metaIn = streams[1];
        ExtendedBlockId key = new ExtendedBlockId(block.GetBlockId(), block.GetBlockPoolId());
        raf = new RandomAccessFile(new FilePath(sockDir.GetDir().GetAbsolutePath(), UUID.RandomUUID().ToString()), "rw");
        raf.SetLength(8192);
        FileInputStream shmStream = new FileInputStream(raf.GetFD());
        shm = new ShortCircuitShm(ShortCircuitShm.ShmId.CreateRandom(), shmStream);
        ShortCircuitReplica replica = new ShortCircuitReplica(key, dataIn, metaIn, shortCircuitCache,
            Time.Now(), shm.AllocAndRegisterSlot(ExtendedBlockId.FromExtendedBlock(block)));
        blockReaderLocal = new BlockReaderLocal.Builder(new DFSClient.Conf(conf))
            .SetFilename(TestPath.GetName())
            .SetBlock(block)
            .SetShortCircuitReplica(replica)
            .SetCachingStrategy(new CachingStrategy(false, readahead))
            .SetVerifyChecksum(checksum)
            .Build();
        dataIn = null;
        metaIn = null;
        test.DoTest(blockReaderLocal, original);
        // BlockReaderLocal should not alter the file position.
        NUnit.Framework.Assert.AreEqual(0, streams[0].GetChannel().Position());
        NUnit.Framework.Assert.AreEqual(0, streams[1].GetChannel().Position());
    }
    finally
    {
        if (fsIn != null) { fsIn.Close(); }
        if (fs != null) { fs.Close(); }
        if (cluster != null) { cluster.Shutdown(); }
        if (dataIn != null) { dataIn.Close(); }
        if (metaIn != null) { metaIn.Close(); }
        if (blockReaderLocal != null) { blockReaderLocal.Close(); }
        if (shm != null) { shm.Free(); }
        if (raf != null) { raf.Close(); }
    }
}
/// <exception cref="System.Exception"/>
private void TestStatistics(bool isShortCircuit)
{
    Assume.AssumeTrue(DomainSocket.GetLoadingFailureReason() == null);
    HdfsConfiguration conf = new HdfsConfiguration();
    TemporarySocketDirectory sockDir = null;
    if (isShortCircuit)
    {
        DFSInputStream.tcpReadsDisabledForTesting = true;
        sockDir = new TemporarySocketDirectory();
        conf.Set(DFSConfigKeys.DfsDomainSocketPathKey,
            new FilePath(sockDir.GetDir(), "TestStatisticsForLocalRead.%d.sock").GetAbsolutePath());
        conf.SetBoolean(DFSConfigKeys.DfsClientReadShortcircuitKey, true);
        DomainSocket.DisableBindPathValidation();
    }
    else
    {
        conf.SetBoolean(DFSConfigKeys.DfsClientReadShortcircuitKey, false);
    }
    MiniDFSCluster cluster = null;
    Path TestPath = new Path("/a");
    long RandomSeed = 4567L;
    FSDataInputStream fsIn = null;
    byte[] original = new byte[TestBlockReaderLocal.BlockReaderLocalTest.TestLength];
    FileSystem fs = null;
    try
    {
        cluster = new MiniDFSCluster.Builder(conf).NumDataNodes(1).Build();
        cluster.WaitActive();
        fs = cluster.GetFileSystem();
        DFSTestUtil.CreateFile(fs, TestPath, TestBlockReaderLocal.BlockReaderLocalTest.TestLength, (short)1, RandomSeed);
        try
        {
            DFSTestUtil.WaitReplication(fs, TestPath, (short)1);
        }
        catch (TimeoutException e)
        {
            NUnit.Framework.Assert.Fail("unexpected TimeoutException during " + "waitReplication: " + e);
        }
        catch (Exception e)
        {
            NUnit.Framework.Assert.Fail("unexpected InterruptedException during " + "waitReplication: " + e);
        }
        fsIn = fs.Open(TestPath);
        IOUtils.ReadFully(fsIn, original, 0, TestBlockReaderLocal.BlockReaderLocalTest.TestLength);
        HdfsDataInputStream dfsIn = (HdfsDataInputStream)fsIn;
        NUnit.Framework.Assert.AreEqual(TestBlockReaderLocal.BlockReaderLocalTest.TestLength,
            dfsIn.GetReadStatistics().GetTotalBytesRead());
        NUnit.Framework.Assert.AreEqual(TestBlockReaderLocal.BlockReaderLocalTest.TestLength,
            dfsIn.GetReadStatistics().GetTotalLocalBytesRead());
        if (isShortCircuit)
        {
            NUnit.Framework.Assert.AreEqual(TestBlockReaderLocal.BlockReaderLocalTest.TestLength,
                dfsIn.GetReadStatistics().GetTotalShortCircuitBytesRead());
        }
        else
        {
            NUnit.Framework.Assert.AreEqual(0, dfsIn.GetReadStatistics().GetTotalShortCircuitBytesRead());
        }
        fsIn.Close();
        fsIn = null;
    }
    finally
    {
        DFSInputStream.tcpReadsDisabledForTesting = false;
        if (fsIn != null) { fsIn.Close(); }
        if (fs != null) { fs.Close(); }
        if (cluster != null) { cluster.Shutdown(); }
        if (sockDir != null) { sockDir.Close(); }
    }
}
public void Document_SecondChildIsComment()
{
    Assume.That(doc.ChildNodes.Count >= 2);
    Assert.That(doc.ChildNodes[1].Name, Is.EqualTo("#comment"));
}
public void Document_ThirdChildIsTestResults()
{
    Assume.That(doc.ChildNodes.Count >= 3);
    Assert.That(doc.ChildNodes[2].Name, Is.EqualTo("test-results"));
}
public void Document_FirstChildIsXmlDeclaration()
{
    Assume.That(doc.FirstChild != null);
    Assert.That(doc.FirstChild.NodeType, Is.EqualTo(XmlNodeType.XmlDeclaration));
    Assert.That(doc.FirstChild.Name, Is.EqualTo("xml"));
}
public void SetUp()
{
    _constraint = new FileOrDirectoryExistsConstraint();
    _goodDir = new TestDirectory();
    Assume.That(BAD_DIRECTORY, Does.Not.Exist, BAD_DIRECTORY + " exists");
}
public void SingleGroupTest(
    [ValueSource(nameof(Blocks_s))] LongRanges blocks,
    [ValueSource(nameof(BufferSizes))] int usingBufferSize)
{
    Assume.That(usingBufferSize >= blocks.Value.Max(o => o.Length));

    const string groupsFilePath = "ZZZZzzzzZZZZzzzzZZZ";
    const int buffersMemoryLimit = 1024 * 1024;

    var configMock = new Mock<IConfig>();
    var physicalBufferLength = usingBufferSize + Consts.BufferReadingEnsurance;
    configMock
        .SetupGet(o => o.PhysicalBufferLength)
        .Returns(physicalBufferLength);
    configMock
        .SetupGet(o => o.UsingBufferLength)
        .Returns(usingBufferSize);
    configMock
        .SetupGet(o => o.GroupsFilePath)
        .Returns(groupsFilePath);

    var bytesCount = blocks.Value.Sum(o => o.Length);
    var linesCount = bytesCount / 4; // max

    var buffersPool = new InfinityBuffersPool(
        physicalBufferLength,
        buffersMemoryLimit);

    var lastBlock = blocks.Value[blocks.Value.Length - 1];
    var inputSize = lastBlock.Offset + lastBlock.Length;
    var input = new byte[inputSize];
    var random = new Random();
    foreach (var block in blocks.Value)
    {
        var blockContent = new byte[block.Length];
        random.NextBytes(blockContent);
        Array.Copy(blockContent, 0, input, block.Offset, block.Length);
    }

    var ioServiceMock = new Mock<IIoService>();
    ioServiceMock
        .Setup(o => o.OpenRead(groupsFilePath, 0L))
        .Returns(() => new MemoryReader(new MemoryStream(input)));

    IGroupsLoaderFactory loaderMaker = new GroupsLoaderFactory(
        buffersPool,
        ioServiceMock.Object,
        configMock.Object);

    var groupIndex = random.Next(Consts.MaxGroupsCount);
    var infos = new GroupInfo[Consts.MaxGroupsCount];
    var groups = new IGroup[Consts.MaxGroupsCount];
    infos[groupIndex] = new GroupInfo
    {
        BytesCount = bytesCount,
        LinesCount = linesCount,
        Mapping = blocks.Value
    };

    var loader = loaderMaker.Create(infos, groups);
    var range = loader.LoadNextGroups();

    Assert.AreEqual(0, range.Offset);
    Assert.AreEqual(Consts.MaxGroupsCount, range.Length);
    Assert.IsTrue(infos
        .Where((_, i) => i != groupIndex)
        .All(GroupInfo.IsZero));
    Assert.IsTrue(groups
        .Where((_, i) => i != groupIndex)
        .All(o => o == null));

    var actualGroup = groups[groupIndex];
    Assert.IsNotNull(actualGroup);
    Assert.AreEqual(0, actualGroup.Lines.Offset);
    Assert.AreEqual(linesCount, actualGroup.Lines.Count);
    Assert.AreEqual(0, actualGroup.SortingSegments.Offset);
    Assert.AreEqual(linesCount, actualGroup.SortingSegments.Count);
    Assert.GreaterOrEqual(actualGroup.Lines.Array.Length, linesCount);
    Assert.GreaterOrEqual(actualGroup.SortingSegments.Array.Length, linesCount);

    var actualGroupBytes = new byte[actualGroup.BytesCount];
    var buffers = actualGroup.Buffers;
    int j = 0;
    for (; j < buffers.Count - 1; j++)
    {
        Array.Copy(buffers.Array[buffers.Offset + j], 0,
                   actualGroupBytes, j * usingBufferSize,
                   usingBufferSize);
    }

    Array.Copy(buffers.Array[buffers.Offset + j], 0,
               actualGroupBytes, j * usingBufferSize,
               actualGroup.BytesCount - j * usingBufferSize);

    var expectedGroupBytes = blocks.Value
        .Select(block => input.Skip((int)block.Offset)
                              .Take(block.Length))
        .Aggregate(Enumerable.Concat)
        .ToArray();

    // Console.WriteLine(string.Join(" ", expectedGroupBytes));
    // Console.WriteLine(string.Join(" ", actualGroupBytes));

    CollectionAssert.AreEqual(
        expectedGroupBytes,
        actualGroupBytes);
}
public void NegativeEqualityTestWithToleranceAndWithSameOffset(DateTimeOffset value1, DateTimeOffset value2)
{
    Assume.That((value1 - value2).Duration() > new TimeSpan(0, 1, 0));
    Assert.That(value1, Is.Not.EqualTo(value2).Within(1).Minutes.WithSameOffset);
}
private void WatersImsMseChromatogramTest(DriftFilterType mode,
    IonMobilityWindowWidthCalculator.IonMobilityPeakWidthType driftPeakWidthCalcType,
    RefinementSettings.ConvertToSmallMoleculesMode asSmallMolecules = RefinementSettings.ConvertToSmallMoleculesMode.none)
{
    if (asSmallMolecules != RefinementSettings.ConvertToSmallMoleculesMode.none && !RunSmallMoleculeTestVersions)
    {
        Console.Write(MSG_SKIPPING_SMALLMOLECULE_TEST_VERSION);
        return;
    }

    string subdir = (asSmallMolecules == RefinementSettings.ConvertToSmallMoleculesMode.none) ? null : asSmallMolecules.ToString();
    var testFilesDir = new TestFilesDir(TestContext, ZIP_FILE, subdir);

    TestSmallMolecules = false; // Don't need that extra magic node

    bool withDriftTimePredictor = (mode == DriftFilterType.predictor); // Load the doc that has a drift time predictor?
    bool withDriftTimeFilter = (mode != DriftFilterType.none); // Perform drift time filtering? (either with predictor, or with bare times in blib file)
    string docPath;
    SrmDocument document = InitWatersImsMseDocument(testFilesDir, driftPeakWidthCalcType, withDriftTimeFilter, withDriftTimePredictor, out docPath);
    AssertEx.IsDocumentState(document, null, 1, 1, 1, 8); // Drift time lib load bumps the doc version, so does small mol conversion
    var listChromatograms = new List<ChromatogramSet>();
    // A small subset of the QC_HDMSE_02_UCA168_3495_082213 data set (RT 21.5-22.5) from Will Thompson
    string mz5Path = "waters-mobility" + ExtensionTestContext.ExtMz5;
    string testModeStr = withDriftTimePredictor ? "with drift time predictor" : "without drift time info";

    if (withDriftTimeFilter && !withDriftTimePredictor)
    {
        testModeStr = "with drift times from spectral library";
    }

    listChromatograms.Add(AssertResult.FindChromatogramSet(document, new MsDataFilePath(mz5Path)) ??
                          new ChromatogramSet(Path.GetFileName(mz5Path).Replace('.', '_'), new[] { mz5Path }));
    using (var docContainer = new ResultsTestDocumentContainer(document, docPath))
    {
        var doc = docContainer.Document;
        var docResults = doc.ChangeMeasuredResults(new MeasuredResults(listChromatograms));
        Assume.IsTrue(docContainer.SetDocument(docResults, doc, true));
        docContainer.AssertComplete();
        document = docContainer.Document;
    }
    document = ConvertToSmallMolecules(document, ref docPath, new[] { mz5Path }, asSmallMolecules);
    using (var docContainer = new ResultsTestDocumentContainer(document, docPath))
    {
        float tolerance = (float)document.Settings.TransitionSettings.Instrument.MzMatchTolerance;
        double maxHeight = 0;
        var results = document.Settings.MeasuredResults;
        Assume.AreEqual(1, document.MoleculePrecursorPairs.Count());
        foreach (var pair in document.MoleculePrecursorPairs)
        {
            ChromatogramGroupInfo[] chromGroupInfo;
            Assume.IsTrue(results.TryLoadChromatogram(0, pair.NodePep, pair.NodeGroup,
                tolerance, true, out chromGroupInfo));
            Assume.AreEqual(1, chromGroupInfo.Length, testModeStr);
            var chromGroup = chromGroupInfo[0];
            int expectedPeaks;
            if (withDriftTimeFilter)
            {
                expectedPeaks = 3;
            }
            else if (asSmallMolecules != RefinementSettings.ConvertToSmallMoleculesMode.masses_only)
            {
                expectedPeaks = 5;
            }
            else
            {
                expectedPeaks = 6; // No libraries
            }
            Assume.AreEqual(expectedPeaks, chromGroup.NumPeaks, testModeStr); // This will be higher if we don't filter on DT
            foreach (var tranInfo in chromGroup.TransitionPointSets)
            {
                maxHeight = Math.Max(maxHeight, tranInfo.MaxIntensity);
            }
        }
        Assume.AreEqual(withDriftTimeFilter ? 5226 : 20075, maxHeight, 1, testModeStr); // Without DT filtering, this will be much greater

        // now drill down for specific values
        int nPeptides = 0;
        foreach (var nodePep in document.Molecules.Where(nodePep => !nodePep.Results[0].IsEmpty))
        {
            // expecting just one peptide result in this small data set
            if (nodePep.Results[0].Sum(chromInfo => chromInfo.PeakCountRatio > 0 ? 1 : 0) > 0)
            {
                Assume.AreEqual(21.94865, (double)nodePep.GetMeasuredRetentionTime(0), .0001, testModeStr);
                Assume.AreEqual(1.0, (double)nodePep.GetPeakCountRatio(0), 0.0001, testModeStr);
                nPeptides++;
            }
        }
        Assume.AreEqual(1, nPeptides);

        if (withDriftTimePredictor || withDriftTimeFilter)
        {
            // Verify that the .imdb or .blib file goes out in the share zipfile
            for (int complete = 0; complete <= 1; complete++)
            {
                var sharePath = testFilesDir.GetTestPath(complete == 1 ? "share_complete.zip" : "share_minimized.zip");
                var share = new SrmDocumentSharing(document, docPath, sharePath,
                    new ShareType(complete == 1, SkylineVersion.CURRENT)); // Explicitly declaring version number forces a save before zip
                share.Share(new SilentProgressMonitor());

                var files = share.ListEntries().ToArray();
                var imdbFile = withDriftTimePredictor ? "scaled.imdb" : "waters-mobility.filtered-scaled.blib";
                if (asSmallMolecules != RefinementSettings.ConvertToSmallMoleculesMode.none)
                {
                    var ext = "." + imdbFile.Split('.').Last();
                    imdbFile = imdbFile.Replace(ext, BiblioSpecLiteSpec.DotConvertedToSmallMolecules + ext);
                }
                Assume.IsTrue(files.Contains(imdbFile));
                // And round trip it to make sure we haven't left out any new features in minimized imdb or blib files
                share.Extract(new SilentProgressMonitor());
                using (var cmdline = new CommandLine())
                {
                    Assume.IsTrue(cmdline.OpenSkyFile(share.DocumentPath)); // Handles any path shifts in database files, like our .imdb file
                    var document2 = cmdline.Document;
                    Assume.IsNotNull(document2);

                    Assume.IsTrue(docContainer.SetDocument(document2, docContainer.Document, true));
                    docContainer.AssertComplete();
                    document2 = docContainer.Document;

                    var im = document2.Settings.GetIonMobilities(new MsDataFilePath(mz5Path));
                    var pep = document2.Molecules.First();
                    foreach (TransitionGroupDocNode nodeGroup in pep.Children)
                    {
                        double windowDT;
                        var centerDriftTime = document2.Settings.GetIonMobility(
                            pep, nodeGroup, null, im, null, driftTimeMax, out windowDT);
                        Assume.AreEqual(3.86124, centerDriftTime.IonMobility.Mobility.Value, .0001, testModeStr);
                        Assume.AreEqual(0.077224865797235934, windowDT, .0001, testModeStr);
                    }
                }
            }
        }
    }
}
public void ArrayAndArraySegment()
{
    Assume.That(ArraySegmentImplementsIEnumerable);
    Assert.That(new int[] { 2, 3, 4 }, Is.EqualTo(new ArraySegment<int>(underlyingArray, 1, 3)));
}
public async Task Setup()
{
    Assume.That(_initializedOnce, Is.Not.Null);
    _initializedEveryTime = new object();
}
public void TwoArraySegments()
{
    Assume.That(ArraySegmentImplementsIEnumerable);
    Assert.That(new ArraySegment<int>(underlyingArray, 1, 3), Is.EqualTo(new ArraySegment<int>(underlyingArray, 1, 3)));
}
public async Task TearDown()
{
    Assume.That(_initializedEveryTime, Is.Null);
    _initializedOnce = null;
}
public void AssumptionPasses_DelegateAndConstraintWithMessage() { Assume.That(new ActualValueDelegate(ReturnsFour), Is.EqualTo(4), "Message"); }
public async Task OneTimeTearDown() { Assume.That(_initializedOnce, Is.Null); }
public void FailureThrowsInconclusiveException_Boolean() { Assume.That(2 + 2 == 5); }
public void PositiveEqualityTest(DateTimeOffset value1, DateTimeOffset value2)
{
    Assume.That(value1 == value2);
    Assert.That(value1, Is.EqualTo(value2));
}
public void FailureThrowsInconclusiveException_BooleanWithMessageAndArgs() { Assume.That(2 + 2 == 5, "got {0}", 5); }
public void NegativeEqualityTest(DateTimeOffset value1, DateTimeOffset value2)
{
    Assume.That(value1 != value2);
    Assert.That(value1, Is.Not.EqualTo(value2));
}
public void FailureThrowsInconclusiveException_ActualAndConstraintWithMessage() { Assume.That(2 + 2, Is.EqualTo(5), "Error"); }
public void PositiveEqualityTestWithTolerance(DateTimeOffset value1, DateTimeOffset value2)
{
    Assume.That((value1 - value2).Duration() <= new TimeSpan(0, 1, 0));
    Assert.That(value1, Is.EqualTo(value2).Within(1).Minutes);
}
public void FailureThrowsInconclusiveException_ReferenceAndConstraintWithMessage()
{
    bool value = false;
    Assume.That(ref value, Is.True, "message");
}
/// <summary>
/// Initializes a new instance of the <see cref="SelectorControlFlow"/> class.
/// </summary>
public SelectorControlFlow(CombinatorBase combinator)
{
    Assume.NotNull(combinator, nameof(combinator));
    Combinator = new RelativeNodeCombinator(new UniversalCombinator(), combinator);
}