Code example #1
        /// <summary>Verify that the new current directory is the old previous.</summary>
        /// <remarks>
        /// Verify that the new current directory is the old previous.
        /// It is assumed that the server has recovered and rolled back.
        /// </remarks>
        /// <exception cref="System.Exception"/>
        internal virtual void CheckResult(HdfsServerConstants.NodeType nodeType, string[] baseDirs)
        {
            IList<FilePath> curDirs = Lists.NewArrayList();

            foreach (string baseDir in baseDirs)
            {
                FilePath curDir = new FilePath(baseDir, "current");
                curDirs.AddItem(curDir);
                switch (nodeType)
                {
                case HdfsServerConstants.NodeType.NameNode:
                {
                    FSImageTestUtil.AssertReasonableNameCurrentDir(curDir);
                    break;
                }

                case HdfsServerConstants.NodeType.DataNode:
                {
                    NUnit.Framework.Assert.AreEqual(
                        UpgradeUtilities.ChecksumContents(nodeType, curDir, false),
                        UpgradeUtilities.ChecksumMasterDataNodeContents());
                    break;
                }
                }
            }
            FSImageTestUtil.AssertParallelFilesAreIdentical(curDirs, Sharpen.Collections.EmptySet<string>());
            for (int i = 0; i < baseDirs.Length; i++)
            {
                NUnit.Framework.Assert.IsFalse(new FilePath(baseDirs[i], "previous").IsDirectory());
            }
        }
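The helper above depends on Hadoop's Sharpen-converted test utilities (FilePath, UpgradeUtilities, FSImageTestUtil). As a rough illustration of the same checks in plain .NET with NUnit, the sketch below computes a deterministic checksum over a directory's files, compares it against a previously recorded baseline, and asserts that no "previous" directory was left behind. ChecksumDirectory and CheckRolledBack are hypothetical names introduced here and are not part of the Hadoop test code.

using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using NUnit.Framework;

internal static class RollbackCheckSketch
{
    // Hypothetical helper: a deterministic checksum over every regular file in a
    // directory, visited in sorted order so the result is stable across runs.
    internal static string ChecksumDirectory(string dir)
    {
        using (var md5 = MD5.Create())
        using (var combined = new MemoryStream())
        {
            foreach (var file in Directory.GetFiles(dir).OrderBy(f => f))
            {
                var bytes = File.ReadAllBytes(file);
                combined.Write(bytes, 0, bytes.Length);
            }
            combined.Position = 0;
            return BitConverter.ToString(md5.ComputeHash(combined));
        }
    }

    // Same shape as CheckResult above: each "current" directory must match the
    // checksum recorded before the upgrade, and "previous" must be gone.
    internal static void CheckRolledBack(string[] baseDirs, string expectedChecksum)
    {
        foreach (var baseDir in baseDirs)
        {
            var curDir = Path.Combine(baseDir, "current");
            Assert.AreEqual(expectedChecksum, ChecksumDirectory(curDir));
            Assert.IsFalse(Directory.Exists(Path.Combine(baseDir, "previous")));
        }
    }
}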
Code example #2
        /// <summary>
        /// Verify that the current directory exists and that the previous directory
        /// does not exist.
        /// </summary>
        /// <remarks>
        /// Verify that the current directory exists and that the previous directory
        /// does not exist.  Verify that current hasn't been modified by comparing
        /// the checksums of all the files it contains with their original checksums.
        /// </remarks>
        /// <exception cref="System.Exception"/>
        internal static void CheckResult(string[] nameNodeDirs, string[] dataNodeDirs, string bpid)
        {
            IList<FilePath> dirs = Lists.NewArrayList();

            for (int i = 0; i < nameNodeDirs.Length; i++)
            {
                FilePath curDir = new FilePath(nameNodeDirs[i], "current");
                dirs.AddItem(curDir);
                FSImageTestUtil.AssertReasonableNameCurrentDir(curDir);
            }
            FSImageTestUtil.AssertParallelFilesAreIdentical(dirs, Sharpen.Collections.EmptySet<string>());
            FilePath[] dnCurDirs = new FilePath[dataNodeDirs.Length];
            for (int i = 0; i < dataNodeDirs.Length; i++)
            {
                dnCurDirs[i] = new FilePath(dataNodeDirs[i], "current");
                NUnit.Framework.Assert.AreEqual(
                    UpgradeUtilities.ChecksumContents(HdfsServerConstants.NodeType.DataNode, dnCurDirs[i], false),
                    UpgradeUtilities.ChecksumMasterDataNodeContents());
            }
            for (int i = 0; i < nameNodeDirs.Length; i++)
            {
                NUnit.Framework.Assert.IsFalse(new FilePath(nameNodeDirs[i], "previous").IsDirectory());
            }
            if (bpid == null)
            {
                for (int i = 0; i < dataNodeDirs.Length; i++)
                {
                    NUnit.Framework.Assert.IsFalse(new FilePath(dataNodeDirs[i], "previous").IsDirectory());
                }
            }
            else
            {
                for (int i = 0; i < dataNodeDirs.Length; i++)
                {
                    FilePath bpRoot = BlockPoolSliceStorage.GetBpRoot(bpid, dnCurDirs[i]);
                    NUnit.Framework.Assert.IsFalse(new FilePath(bpRoot, "previous").IsDirectory());
                    FilePath bpCurFinalizeDir = new FilePath(bpRoot, "current/" + DataStorage.StorageDirFinalized);
                    NUnit.Framework.Assert.AreEqual(
                        UpgradeUtilities.ChecksumContents(HdfsServerConstants.NodeType.DataNode, bpCurFinalizeDir, true),
                        UpgradeUtilities.ChecksumMasterBlockPoolFinalizedContents());
                }
            }
        }
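Both examples above rely on FSImageTestUtil.AssertParallelFilesAreIdentical to confirm that every storage directory holds the same files with identical contents. A minimal stand-alone sketch of that property, using only System.IO and NUnit, might look like the following; this is a hypothetical stand-in, not the actual Hadoop implementation.

using System.IO;
using System.Linq;
using NUnit.Framework;

internal static class ParallelDirsSketch
{
    // Hypothetical stand-in for the property checked by
    // FSImageTestUtil.AssertParallelFilesAreIdentical: every directory contains
    // the same file names, and files with the same name are byte-for-byte
    // identical, ignoring any names listed in ignoredNames.
    internal static void AssertParallelFilesAreIdentical(string[] dirs, string[] ignoredNames)
    {
        string reference = dirs[0];
        string[] referenceFiles = ListFiles(reference, ignoredNames);

        foreach (string dir in dirs.Skip(1))
        {
            string[] files = ListFiles(dir, ignoredNames);
            CollectionAssert.AreEqual(referenceFiles, files);

            foreach (string name in files)
            {
                CollectionAssert.AreEqual(
                    File.ReadAllBytes(Path.Combine(reference, name)),
                    File.ReadAllBytes(Path.Combine(dir, name)));
            }
        }
    }

    // Sorted file names in a directory, minus any ignored names.
    private static string[] ListFiles(string dir, string[] ignoredNames)
    {
        return Directory.GetFiles(dir)
            .Select(p => Path.GetFileName(p))
            .Where(name => !ignoredNames.Contains(name))
            .OrderBy(name => name)
            .ToArray();
    }
}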
Code example #3
        /// <summary>
        /// Test for the case when both of the NNs in the cluster are
        /// in the standby state, and thus are both creating checkpoints
        /// and uploading them to each other.
        /// </summary>
        /// <remarks>
        /// Test for the case when both of the NNs in the cluster are
        /// in the standby state, and thus are both creating checkpoints
        /// and uploading them to each other.
        /// In this circumstance, they should receive the error from the
        /// other node indicating that the other node already has a
        /// checkpoint for the given txid, but this should not cause
        /// an abort, etc.
        /// </remarks>
        /// <exception cref="System.Exception"/>
        public virtual void TestBothNodesInStandbyState()
        {
            DoEdits(0, 10);
            cluster.TransitionToStandby(0);
            // Transitioning to standby closed the edit log on the active,
            // so the standby will catch up. Then, both will be in standby mode
            // with enough uncheckpointed txns to cause a checkpoint, and they
            // will each try to take a checkpoint and upload to each other.
            HATestUtil.WaitForCheckpoint(cluster, 1, ImmutableList.Of(12));
            HATestUtil.WaitForCheckpoint(cluster, 0, ImmutableList.Of(12));
            NUnit.Framework.Assert.AreEqual(12, nn0.GetNamesystem().GetFSImage().GetMostRecentCheckpointTxId());
            NUnit.Framework.Assert.AreEqual(12, nn1.GetNamesystem().GetFSImage().GetMostRecentCheckpointTxId());
            IList<FilePath> dirs = Lists.NewArrayList();

            Sharpen.Collections.AddAll(dirs, FSImageTestUtil.GetNameNodeCurrentDirs(cluster, 0));
            Sharpen.Collections.AddAll(dirs, FSImageTestUtil.GetNameNodeCurrentDirs(cluster, 1));
            FSImageTestUtil.AssertParallelFilesAreIdentical(dirs, ImmutableSet.Of<string>());
        }
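A helper like HATestUtil.WaitForCheckpoint presumably amounts to polling a condition until it holds or a timeout expires. The generic sketch below shows that polling pattern in plain C#; WaitForSketch, its parameters, and the timeout values in the usage comment are illustrative assumptions, not Hadoop APIs.

using System;
using System.Diagnostics;
using System.Threading;

internal static class WaitForSketch
{
    // Hypothetical stand-in for the polling pattern behind
    // HATestUtil.WaitForCheckpoint: re-evaluate a condition until it holds
    // or a timeout elapses, sleeping briefly between checks.
    internal static void WaitFor(Func<bool> condition, TimeSpan timeout, TimeSpan pollInterval)
    {
        Stopwatch watch = Stopwatch.StartNew();
        while (!condition())
        {
            if (watch.Elapsed > timeout)
            {
                throw new TimeoutException("Condition was not met within " + timeout);
            }
            Thread.Sleep(pollInterval);
        }
    }
}

// Illustrative usage only (nn0 and the txid 12 come from the test above):
//   WaitForSketch.WaitFor(
//       () => nn0.GetNamesystem().GetFSImage().GetMostRecentCheckpointTxId() >= 12,
//       TimeSpan.FromSeconds(30), TimeSpan.FromMilliseconds(250));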