// Verifies a freshly created view: zero overall elapsed time/progress, and
// every phase pending with default (empty) tracking data and no steps; the
// view must enumerate exactly the full set of defined phases, in enum order.
public virtual void TestInitialState()
{
    StartupProgressView view = startupProgress.CreateView();
    NUnit.Framework.Assert.IsNotNull(view);
    NUnit.Framework.Assert.AreEqual(0L, view.GetElapsedTime());
    NUnit.Framework.Assert.AreEqual(0.0f, view.GetPercentComplete(), 0.001f);
    IList<Phase> phases = new AList<Phase>();
    foreach (Phase phase in view.GetPhases())
    {
        phases.AddItem(phase);
        NUnit.Framework.Assert.AreEqual(0L, view.GetElapsedTime(phase));
        NUnit.Framework.Assert.IsNull(view.GetFile(phase));
        NUnit.Framework.Assert.AreEqual(0.0f, view.GetPercentComplete(phase), 0.001f);
        NUnit.Framework.Assert.AreEqual(long.MinValue, view.GetSize(phase));
        NUnit.Framework.Assert.AreEqual(Status.Pending, view.GetStatus(phase));
        NUnit.Framework.Assert.AreEqual(0L, view.GetTotal(phase));
        foreach (Step step in view.GetSteps(phase))
        {
            // BUG FIX: the original format string used Java-style "%s"
            // placeholders, which .NET string.Format ignores, so the failure
            // message silently dropped the offending step and phase. Use
            // composite-format "{0}"/"{1}" placeholders instead.
            NUnit.Framework.Assert.Fail(string.Format("unexpected step {0} in phase {1} at initial state", step, phase));
        }
    }
    Assert.AssertArrayEquals(Sharpen.Collections.ToArray(EnumSet.AllOf<Phase>()), Sharpen.Collections.ToArray(phases));
}
// Registers three NMs (h1..h3), starts nm1 and nm3, leaves nm2 in NEW state,
// then marks nm3 LOST, and queries /ws/v1/cluster/nodes with every NodeState
// joined into the "states" parameter, expecting all three nodes — RUNNING,
// NEW, and LOST — to come back in a single JSON "node" array.
public virtual void TestQueryAll() { WebResource r = Resource(); MockNM nm1 = rm.RegisterNode("h1:1234", 5120); MockNM nm2 = rm.RegisterNode("h2:1235", 5121); MockNM nm3 = rm.RegisterNode("h3:1236", 5122); rm.SendNodeStarted(nm1); rm.SendNodeStarted(nm3); rm.NMwaitForState(nm1.GetNodeId(), NodeState.Running); rm.NMwaitForState(nm2.GetNodeId(), NodeState.New); rm.SendNodeLost(nm3); ClientResponse response = r.Path("ws").Path("v1").Path("cluster").Path("nodes").QueryParam ("states", Joiner.On(',').Join(EnumSet.AllOf <NodeState>())).Accept(MediaType.ApplicationJson ).Get <ClientResponse>(); NUnit.Framework.Assert.AreEqual(MediaType.ApplicationJsonType, response.GetType() ); JSONObject json = response.GetEntity <JSONObject>(); JSONObject nodes = json.GetJSONObject("nodes"); NUnit.Framework.Assert.AreEqual("incorrect number of elements", 1, nodes.Length() ); JSONArray nodeArray = nodes.GetJSONArray("node"); NUnit.Framework.Assert.AreEqual("incorrect number of elements", 3, nodeArray.Length ()); }
// Round-trips a timeline entity (type, id, domain, one event) through a
// TimelineClient configured from `conf`, asserts the PUT produced no errors
// and went over https (client.resp contains "https"), then re-reads the
// entity from the store and verifies its id and type match. The client is
// always stopped and closed in the finally block.
public virtual void TestPutEntities() { TestTimelineWebServicesWithSSL.TestTimelineClient client = new TestTimelineWebServicesWithSSL.TestTimelineClient (); try { client.Init(conf); client.Start(); TimelineEntity expectedEntity = new TimelineEntity(); expectedEntity.SetEntityType("test entity type"); expectedEntity.SetEntityId("test entity id"); expectedEntity.SetDomainId("test domain id"); TimelineEvent @event = new TimelineEvent(); @event.SetEventType("test event type"); @event.SetTimestamp(0L); expectedEntity.AddEvent(@event); TimelinePutResponse response = client.PutEntities(expectedEntity); NUnit.Framework.Assert.AreEqual(0, response.GetErrors().Count); NUnit.Framework.Assert.IsTrue(client.resp.ToString().Contains("https")); TimelineEntity actualEntity = store.GetEntity(expectedEntity.GetEntityId(), expectedEntity .GetEntityType(), EnumSet.AllOf <TimelineReader.Field>()); NUnit.Framework.Assert.IsNotNull(actualEntity); NUnit.Framework.Assert.AreEqual(expectedEntity.GetEntityId(), actualEntity.GetEntityId ()); NUnit.Framework.Assert.AreEqual(expectedEntity.GetEntityType(), actualEntity.GetEntityType ()); } finally { client.Stop(); client.Close(); } }
/// <summary>
/// Creates a new StartupProgress, registering a fresh PhaseTracking entry
/// for every defined Phase so that progress can be recorded for each one.
/// </summary>
public StartupProgress()
{
    foreach (Phase p in EnumSet.AllOf<Phase>())
    {
        phases[p] = new PhaseTracking();
    }
}
/// <summary>
/// Determines whether the entire startup process has finished by checking
/// that no phase is still in a non-complete status.
/// </summary>
/// <returns>true if every phase reports Status.Complete</returns>
private bool IsComplete()
{
    foreach (Phase p in EnumSet.AllOf<Phase>())
    {
        if (GetStatus(p) == Status.Complete)
        {
            continue;
        }
        // Found a phase that has not completed yet.
        return false;
    }
    return true;
}
// Writable serialization round-trip tests for BlockTokenIdentifier: the
// default (empty) instance, plus token ids generated with all access modes,
// a single mode (Write), and no modes at all.
public virtual void TestWritable() { TestWritable.TestWritable(new BlockTokenIdentifier()); BlockTokenSecretManager sm = new BlockTokenSecretManager(blockKeyUpdateInterval, blockTokenLifetime, 0, "fake-pool", null); TestWritable.TestWritable(GenerateTokenId(sm, block1, EnumSet.AllOf <BlockTokenSecretManager.AccessMode >())); TestWritable.TestWritable(GenerateTokenId(sm, block2, EnumSet.Of(BlockTokenSecretManager.AccessMode .Write))); TestWritable.TestWritable(GenerateTokenId(sm, block3, EnumSet.NoneOf <BlockTokenSecretManager.AccessMode >())); }
// Exercises CheckCompression for every SequenceFile compression type, with
// the combiner both disabled and enabled, for both values of the first flag.
public virtual void TestCompression()
{
    foreach (SequenceFile.CompressionType compression in EnumSet.AllOf<SequenceFile.CompressionType>())
    {
        foreach (bool useCombiner in new bool[] { false, true })
        {
            CheckCompression(false, compression, useCombiner);
            CheckCompression(true, compression, useCombiner);
        }
    }
}
/// <summary>
/// Removes the enum constant named <paramref name="s"/> from the set. A null
/// set is first materialized as the full set of T, so "remove from null"
/// yields "everything except s". An unknown name (or an enum type absent
/// from fullmap) leaves the set untouched.
/// </summary>
public static EnumSet<T> Rem<T>(EnumSet<T> set, string s) where T : Enum<T>
{
    System.Type c = typeof(T);
    if (fullmap[c] == null || fullmap[c][s] == null)
    {
        // Nothing registered under this type/name; return the set unchanged.
        return set;
    }
    if (set == null)
    {
        set = EnumSet.AllOf(c);
    }
    set.Remove(fullmap[c][s]);
    return set;
}
/// <summary>
/// Fetches node reports from the RM for the requested node states. An empty
/// argument list means "nodes in every state"; otherwise only the states the
/// caller listed are included in the query.
/// </summary>
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/>
/// <exception cref="System.IO.IOException"/>
public override IList<NodeReport> GetNodeReports(params NodeState[] states)
{
    EnumSet<NodeState> requested = (states.Length == 0)
        ? EnumSet.AllOf<NodeState>()
        : EnumSet.NoneOf<NodeState>();
    foreach (NodeState s in states)
    {
        requested.AddItem(s);
    }
    return rmClient.GetClusterNodes(GetClusterNodesRequest.NewInstance(requested)).GetNodeReports();
}
/// <summary>
/// Looks up a stored timeline entity by id and type (synchronized on this
/// store), returning null when absent. Retrieved fields are masked down to
/// the requested set; a null field set means "retrieve everything".
/// </summary>
public virtual TimelineEntity GetEntity(string entityId, string entityType, EnumSet <TimelineReader.Field> fieldsToRetrieve)
{
    lock (this)
    {
        EnumSet<TimelineReader.Field> fields = fieldsToRetrieve ?? EnumSet.AllOf<TimelineReader.Field>();
        TimelineEntity stored = entities[new EntityIdentifier(entityId, entityType)];
        return stored == null ? null : MaskFields(stored, fields);
    }
}
// Regression test for RPC client-side file-descriptor leaks: repeatedly
// creates and stops ClientDatanodeProtocol proxies against a mock datanode
// for ~3 seconds and fails if more than 50 fds were leaked. A second proxy
// pointed at an unreachable address ("1.1.1.1") keeps the shared RPC Client
// refcount above zero so RPC.StopProxy does not tear down the real TCP
// connections between iterations. Skipped (Assume) when the fd directory is
// unavailable — presumably /proc-based, so Linux-only; confirm FdDir.
public virtual void TestBlockTokenRpcLeak() { Configuration conf = new Configuration(); conf.Set(CommonConfigurationKeysPublic.HadoopSecurityAuthentication, "kerberos"); UserGroupInformation.SetConfiguration(conf); Assume.AssumeTrue(FdDir.Exists()); BlockTokenSecretManager sm = new BlockTokenSecretManager(blockKeyUpdateInterval, blockTokenLifetime, 0, "fake-pool", null); Org.Apache.Hadoop.Security.Token.Token <BlockTokenIdentifier> token = sm.GenerateToken (block3, EnumSet.AllOf <BlockTokenSecretManager.AccessMode>()); Server server = CreateMockDatanode(sm, token, conf); server.Start(); IPEndPoint addr = NetUtils.GetConnectAddress(server); DatanodeID fakeDnId = DFSTestUtil.GetLocalDatanodeID(addr.Port); ExtendedBlock b = new ExtendedBlock("fake-pool", new Org.Apache.Hadoop.Hdfs.Protocol.Block (12345L)); LocatedBlock fakeBlock = new LocatedBlock(b, new DatanodeInfo[0]); fakeBlock.SetBlockToken(token); // Create another RPC proxy with the same configuration - this will never // attempt to connect anywhere -- but it causes the refcount on the // RPC "Client" object to stay above 0 such that RPC.stopProxy doesn't // actually close the TCP connections to the real target DN. 
ClientDatanodeProtocol proxyToNoWhere = RPC.GetProxy <ClientDatanodeProtocol>(ClientDatanodeProtocol .versionID, new IPEndPoint("1.1.1.1", 1), UserGroupInformation.CreateRemoteUser( "junk"), conf, NetUtils.GetDefaultSocketFactory(conf)); ClientDatanodeProtocol proxy = null; int fdsAtStart = CountOpenFileDescriptors(); try { long endTime = Time.Now() + 3000; while (Time.Now() < endTime) { proxy = DFSUtil.CreateClientDatanodeProtocolProxy(fakeDnId, conf, 1000, false, fakeBlock ); NUnit.Framework.Assert.AreEqual(block3.GetBlockId(), proxy.GetReplicaVisibleLength (block3)); if (proxy != null) { RPC.StopProxy(proxy); } Log.Info("Num open fds:" + CountOpenFileDescriptors()); } int fdsAtEnd = CountOpenFileDescriptors(); if (fdsAtEnd - fdsAtStart > 50) { NUnit.Framework.Assert.Fail("Leaked " + (fdsAtEnd - fdsAtStart) + " fds!"); } } finally { server.Stop(); } RPC.StopProxy(proxyToNoWhere); }
// Verifies block-token-authenticated RPC: a client whose UGI carries a token
// for block3 (all access modes) can successfully call
// GetReplicaVisibleLength on a mock datanode server. The server and proxy
// are always cleaned up in the finally block.
public virtual void TestBlockTokenRpc() { Configuration conf = new Configuration(); conf.Set(CommonConfigurationKeysPublic.HadoopSecurityAuthentication, "kerberos"); UserGroupInformation.SetConfiguration(conf); BlockTokenSecretManager sm = new BlockTokenSecretManager(blockKeyUpdateInterval, blockTokenLifetime, 0, "fake-pool", null); Org.Apache.Hadoop.Security.Token.Token <BlockTokenIdentifier> token = sm.GenerateToken (block3, EnumSet.AllOf <BlockTokenSecretManager.AccessMode>()); Server server = CreateMockDatanode(sm, token, conf); server.Start(); IPEndPoint addr = NetUtils.GetConnectAddress(server); UserGroupInformation ticket = UserGroupInformation.CreateRemoteUser(block3.ToString ()); ticket.AddToken(token); ClientDatanodeProtocol proxy = null; try { proxy = DFSUtil.CreateClientDatanodeProtocolProxy(addr, ticket, conf, NetUtils.GetDefaultSocketFactory (conf)); NUnit.Framework.Assert.AreEqual(block3.GetBlockId(), proxy.GetReplicaVisibleLength (block3)); } finally { server.Stop(); if (proxy != null) { RPC.StopProxy(proxy); } } }
// Verifies cross-manager token verification: tokens generated by either the
// master or the slave secret manager pass CheckAccess against both managers,
// first for each single access mode, then for one multi-mode token carrying
// every access mode.
/// <exception cref="System.Exception"/> private void TokenGenerationAndVerification(BlockTokenSecretManager master, BlockTokenSecretManager slave) { // single-mode tokens foreach (BlockTokenSecretManager.AccessMode mode in BlockTokenSecretManager.AccessMode .Values()) { // generated by master Org.Apache.Hadoop.Security.Token.Token <BlockTokenIdentifier> token1 = master.GenerateToken (block1, EnumSet.Of(mode)); master.CheckAccess(token1, null, block1, mode); slave.CheckAccess(token1, null, block1, mode); // generated by slave Org.Apache.Hadoop.Security.Token.Token <BlockTokenIdentifier> token2 = slave.GenerateToken (block2, EnumSet.Of(mode)); master.CheckAccess(token2, null, block2, mode); slave.CheckAccess(token2, null, block2, mode); } // multi-mode tokens Org.Apache.Hadoop.Security.Token.Token <BlockTokenIdentifier> mtoken = master.GenerateToken (block3, EnumSet.AllOf <BlockTokenSecretManager.AccessMode>()); foreach (BlockTokenSecretManager.AccessMode mode_1 in BlockTokenSecretManager.AccessMode .Values()) { master.CheckAccess(mtoken, null, block3, mode_1); slave.CheckAccess(mtoken, null, block3, mode_1); } }
// Publishes created/finished/ACLs-updated events for two applications — one
// with an explicit ACL string ("uers1,user2"), one with null ACLs that must
// surface as the empty string — polling the store until all three events
// arrive, then verifies every published entity field and event payload.
// NOTE(review): the memory-/vcore-seconds assertions sit inside the `else`
// (null-ACL) branch only, so they run just for the second app — confirm that
// placement is intentional.
/// <exception cref="System.Exception"/> public virtual void TestPublishApplicationMetrics() { for (int i = 1; i <= 2; ++i) { ApplicationId appId = ApplicationId.NewInstance(0, i); RMApp app = CreateRMApp(appId); metricsPublisher.AppCreated(app, app.GetStartTime()); metricsPublisher.AppFinished(app, RMAppState.Finished, app.GetFinishTime()); if (i == 1) { metricsPublisher.AppACLsUpdated(app, "uers1,user2", 4L); } else { // in case user doesn't specify the ACLs metricsPublisher.AppACLsUpdated(app, null, 4L); } TimelineEntity entity = null; do { entity = store.GetEntity(appId.ToString(), ApplicationMetricsConstants.EntityType , EnumSet.AllOf <TimelineReader.Field>()); }while (entity == null || entity.GetEvents().Count < 3); // ensure three events are both published before leaving the loop // verify all the fields NUnit.Framework.Assert.AreEqual(ApplicationMetricsConstants.EntityType, entity.GetEntityType ()); NUnit.Framework.Assert.AreEqual(app.GetApplicationId().ToString(), entity.GetEntityId ()); NUnit.Framework.Assert.AreEqual(app.GetName(), entity.GetOtherInfo()[ApplicationMetricsConstants .NameEntityInfo]); NUnit.Framework.Assert.AreEqual(app.GetQueue(), entity.GetOtherInfo()[ApplicationMetricsConstants .QueueEntityInfo]); NUnit.Framework.Assert.AreEqual(app.GetUser(), entity.GetOtherInfo()[ApplicationMetricsConstants .UserEntityInfo]); NUnit.Framework.Assert.AreEqual(app.GetApplicationType(), entity.GetOtherInfo()[ApplicationMetricsConstants .TypeEntityInfo]); NUnit.Framework.Assert.AreEqual(app.GetSubmitTime(), entity.GetOtherInfo()[ApplicationMetricsConstants .SubmittedTimeEntityInfo]); if (i == 1) { NUnit.Framework.Assert.AreEqual("uers1,user2", entity.GetOtherInfo()[ApplicationMetricsConstants .AppViewAclsEntityInfo]); } else { NUnit.Framework.Assert.AreEqual(string.Empty, entity.GetOtherInfo()[ApplicationMetricsConstants .AppViewAclsEntityInfo]); NUnit.Framework.Assert.AreEqual(app.GetRMAppMetrics().GetMemorySeconds(), long.Parse 
(entity.GetOtherInfo()[ApplicationMetricsConstants.AppMemMetrics].ToString())); NUnit.Framework.Assert.AreEqual(app.GetRMAppMetrics().GetVcoreSeconds(), long.Parse (entity.GetOtherInfo()[ApplicationMetricsConstants.AppCpuMetrics].ToString())); } bool hasCreatedEvent = false; bool hasFinishedEvent = false; bool hasACLsUpdatedEvent = false; foreach (TimelineEvent @event in entity.GetEvents()) { if (@event.GetEventType().Equals(ApplicationMetricsConstants.CreatedEventType)) { hasCreatedEvent = true; NUnit.Framework.Assert.AreEqual(app.GetStartTime(), @event.GetTimestamp()); } else { if (@event.GetEventType().Equals(ApplicationMetricsConstants.FinishedEventType)) { hasFinishedEvent = true; NUnit.Framework.Assert.AreEqual(app.GetFinishTime(), @event.GetTimestamp()); NUnit.Framework.Assert.AreEqual(app.GetDiagnostics().ToString(), @event.GetEventInfo ()[ApplicationMetricsConstants.DiagnosticsInfoEventInfo]); NUnit.Framework.Assert.AreEqual(app.GetFinalApplicationStatus().ToString(), @event .GetEventInfo()[ApplicationMetricsConstants.FinalStatusEventInfo]); NUnit.Framework.Assert.AreEqual(YarnApplicationState.Finished.ToString(), @event. GetEventInfo()[ApplicationMetricsConstants.StateEventInfo]); } else { if (@event.GetEventType().Equals(ApplicationMetricsConstants.AclsUpdatedEventType )) { hasACLsUpdatedEvent = true; NUnit.Framework.Assert.AreEqual(4L, @event.GetTimestamp()); } } } } NUnit.Framework.Assert.IsTrue(hasCreatedEvent && hasFinishedEvent && hasACLsUpdatedEvent ); } }
/// <summary>Returns all phases.</summary>
/// <returns>an enumerable over every defined phase</returns>
public virtual IEnumerable <Phase> GetPhases()
{
    IEnumerable<Phase> allPhases = EnumSet.AllOf<Phase>();
    return allPhases;
}
// Fetches the timeline entity for the given application id (all fields, as
// the login user) and converts it into an ApplicationReportExt with the
// requested field set; throws ApplicationNotFoundException when the store
// has no entity for the application.
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> private ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt GetApplication (ApplicationId appId, ApplicationHistoryManagerOnTimelineStore.ApplicationReportField field) { TimelineEntity entity = timelineDataManager.GetEntity(ApplicationMetricsConstants .EntityType, appId.ToString(), EnumSet.AllOf <TimelineReader.Field>(), UserGroupInformation .GetLoginUser()); if (entity == null) { throw new ApplicationNotFoundException("The entity for application " + appId + " doesn't exist in the timeline store" ); } else { return(GenerateApplicationReport(entity, field)); } }
// Fetches the timeline entity for the given attempt id (all fields, as the
// login user), optionally enforcing the owning application's ACLs first, and
// converts it to an ApplicationAttemptReport; throws
// ApplicationAttemptNotFoundException when the entity is missing.
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> private ApplicationAttemptReport GetApplicationAttempt(ApplicationAttemptId appAttemptId , bool checkACLs) { if (checkACLs) { ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt app = GetApplication (appAttemptId.GetApplicationId(), ApplicationHistoryManagerOnTimelineStore.ApplicationReportField .UserAndAcls); CheckAccess(app); } TimelineEntity entity = timelineDataManager.GetEntity(AppAttemptMetricsConstants. EntityType, appAttemptId.ToString(), EnumSet.AllOf <TimelineReader.Field>(), UserGroupInformation .GetLoginUser()); if (entity == null) { throw new ApplicationAttemptNotFoundException("The entity for application attempt " + appAttemptId + " doesn't exist in the timeline store"); } else { return(ConvertToApplicationAttemptReport(entity)); } }
// Scans the in-memory store (synchronized on `this`) for entities of
// `entityType`: applies defaults for null limit/window/fields, then iterates
// either from `fromId` onward (TreeSet tail set) or over all entities in
// priority-queue order, filtering by start-time window, insert time
// (`fromTs`), the primary filter, AND-combined secondary filters, and the
// optional ACL callback. Selected entities get the default domain id when
// missing, are field-masked, sorted, and returned wrapped in
// TimelineEntities.
// NOTE(review): `limit`, `windowStart`, `windowEnd`, and `fromTs` are
// compared against null although declared as `long` here — presumably
// Sharpen maps them from Java boxed Longs; confirm this compiles as
// intended.
/// <exception cref="System.IO.IOException"/> public virtual TimelineEntities GetEntities(string entityType, long limit, long windowStart , long windowEnd, string fromId, long fromTs, NameValuePair primaryFilter, ICollection <NameValuePair> secondaryFilters, EnumSet <TimelineReader.Field> fields, TimelineDataManager.CheckAcl checkAcl) { lock (this) { if (limit == null) { limit = DefaultLimit; } if (windowStart == null) { windowStart = long.MinValue; } if (windowEnd == null) { windowEnd = long.MaxValue; } if (fields == null) { fields = EnumSet.AllOf <TimelineReader.Field>(); } IEnumerator <TimelineEntity> entityIterator = null; if (fromId != null) { TimelineEntity firstEntity = entities[new EntityIdentifier(fromId, entityType)]; if (firstEntity == null) { return(new TimelineEntities()); } else { entityIterator = new TreeSet <TimelineEntity>(entities.Values).TailSet(firstEntity , true).GetEnumerator(); } } if (entityIterator == null) { entityIterator = new PriorityQueue <TimelineEntity>(entities.Values).GetEnumerator (); } IList <TimelineEntity> entitiesSelected = new AList <TimelineEntity>(); while (entityIterator.HasNext()) { TimelineEntity entity = entityIterator.Next(); if (entitiesSelected.Count >= limit) { break; } if (!entity.GetEntityType().Equals(entityType)) { continue; } if (entity.GetStartTime() <= windowStart) { continue; } if (entity.GetStartTime() > windowEnd) { continue; } if (fromTs != null && entityInsertTimes[new EntityIdentifier(entity.GetEntityId() , entity.GetEntityType())] > fromTs) { continue; } if (primaryFilter != null && !MatchPrimaryFilter(entity.GetPrimaryFilters(), primaryFilter )) { continue; } if (secondaryFilters != null) { // AND logic bool flag = true; foreach (NameValuePair secondaryFilter in secondaryFilters) { if (secondaryFilter != null && !MatchPrimaryFilter(entity.GetPrimaryFilters(), secondaryFilter ) && !MatchFilter(entity.GetOtherInfo(), secondaryFilter)) { flag = false; break; } } if (!flag) { continue; } } if 
(entity.GetDomainId() == null) { entity.SetDomainId(TimelineDataManager.DefaultDomainId); } if (checkAcl == null || checkAcl.Check(entity)) { entitiesSelected.AddItem(entity); } } IList <TimelineEntity> entitiesToReturn = new AList <TimelineEntity>(); foreach (TimelineEntity entitySelected in entitiesSelected) { entitiesToReturn.AddItem(MaskFields(entitySelected, fields)); } entitiesToReturn.Sort(); TimelineEntities entitiesWrapper = new TimelineEntities(); entitiesWrapper.SetEntities(entitiesToReturn); return(entitiesWrapper); } }
// Publishes container created/finished events and polls the store until both
// arrive, then verifies the entity's type and id, the parent-attempt primary
// filter, the allocation info (host, port, memory, vcores, priority), and
// the payloads of the created and finished events (timestamps, diagnostics,
// exit status, state).
/// <exception cref="System.Exception"/> public virtual void TestPublishContainerMetrics() { ContainerId containerId = ContainerId.NewContainerId(ApplicationAttemptId.NewInstance (ApplicationId.NewInstance(0, 1), 1), 1); RMContainer container = CreateRMContainer(containerId); metricsPublisher.ContainerCreated(container, container.GetCreationTime()); metricsPublisher.ContainerFinished(container, container.GetFinishTime()); TimelineEntity entity = null; do { entity = store.GetEntity(containerId.ToString(), ContainerMetricsConstants.EntityType , EnumSet.AllOf <TimelineReader.Field>()); }while (entity == null || entity.GetEvents().Count < 2); // ensure two events are both published before leaving the loop // verify all the fields NUnit.Framework.Assert.AreEqual(ContainerMetricsConstants.EntityType, entity.GetEntityType ()); NUnit.Framework.Assert.AreEqual(containerId.ToString(), entity.GetEntityId()); NUnit.Framework.Assert.AreEqual(containerId.GetApplicationAttemptId().ToString(), entity.GetPrimaryFilters()[ContainerMetricsConstants.ParentPrimariyFilter].GetEnumerator ().Next()); NUnit.Framework.Assert.AreEqual(container.GetAllocatedNode().GetHost(), entity.GetOtherInfo ()[ContainerMetricsConstants.AllocatedHostEntityInfo]); NUnit.Framework.Assert.AreEqual(container.GetAllocatedNode().GetPort(), entity.GetOtherInfo ()[ContainerMetricsConstants.AllocatedPortEntityInfo]); NUnit.Framework.Assert.AreEqual(container.GetAllocatedResource().GetMemory(), entity .GetOtherInfo()[ContainerMetricsConstants.AllocatedMemoryEntityInfo]); NUnit.Framework.Assert.AreEqual(container.GetAllocatedResource().GetVirtualCores( ), entity.GetOtherInfo()[ContainerMetricsConstants.AllocatedVcoreEntityInfo]); NUnit.Framework.Assert.AreEqual(container.GetAllocatedPriority().GetPriority(), entity .GetOtherInfo()[ContainerMetricsConstants.AllocatedPriorityEntityInfo]); bool hasCreatedEvent = false; bool hasFinishedEvent = false; foreach (TimelineEvent @event in entity.GetEvents()) { if 
(@event.GetEventType().Equals(ContainerMetricsConstants.CreatedEventType)) { hasCreatedEvent = true; NUnit.Framework.Assert.AreEqual(container.GetCreationTime(), @event.GetTimestamp( )); } else { if (@event.GetEventType().Equals(ContainerMetricsConstants.FinishedEventType)) { hasFinishedEvent = true; NUnit.Framework.Assert.AreEqual(container.GetFinishTime(), @event.GetTimestamp()); NUnit.Framework.Assert.AreEqual(container.GetDiagnosticsInfo(), @event.GetEventInfo ()[ContainerMetricsConstants.DiagnosticsInfoEventInfo]); NUnit.Framework.Assert.AreEqual(container.GetContainerExitStatus(), @event.GetEventInfo ()[ContainerMetricsConstants.ExitStatusEventInfo]); NUnit.Framework.Assert.AreEqual(container.GetContainerState().ToString(), @event. GetEventInfo()[ContainerMetricsConstants.StateEventInfo]); } } } NUnit.Framework.Assert.IsTrue(hasCreatedEvent && hasFinishedEvent); }
// Returns the ContainerReport for `containerId`: first enforces ACLs via the
// owning application, then fetches the container's timeline entity (all
// fields, as the login user) and converts it; throws
// ContainerNotFoundException when the entity is absent.
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> public virtual ContainerReport GetContainer(ContainerId containerId) { ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt app = GetApplication (containerId.GetApplicationAttemptId().GetApplicationId(), ApplicationHistoryManagerOnTimelineStore.ApplicationReportField .UserAndAcls); CheckAccess(app); TimelineEntity entity = timelineDataManager.GetEntity(ContainerMetricsConstants.EntityType , containerId.ToString(), EnumSet.AllOf <TimelineReader.Field>(), UserGroupInformation .GetLoginUser()); if (entity == null) { throw new ContainerNotFoundException("The entity for container " + containerId + " doesn't exist in the timeline store"); } else { return(ConvertToContainerReport(entity, serverHttpAddress, app.appReport.GetUser( ))); } }
// Returns all attempt reports for `appId`, keyed by attempt id in insertion
// order: enforces application ACLs, then queries attempt entities whose
// parent primary filter matches the application id (no window/paging
// restrictions, up to long.MaxValue entities, all fields) and converts each
// one.
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> public virtual IDictionary <ApplicationAttemptId, ApplicationAttemptReport> GetApplicationAttempts (ApplicationId appId) { ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt app = GetApplication (appId, ApplicationHistoryManagerOnTimelineStore.ApplicationReportField.UserAndAcls ); CheckAccess(app); TimelineEntities entities = timelineDataManager.GetEntities(AppAttemptMetricsConstants .EntityType, new NameValuePair(AppAttemptMetricsConstants.ParentPrimaryFilter, appId .ToString()), null, null, null, null, null, long.MaxValue, EnumSet.AllOf <TimelineReader.Field >(), UserGroupInformation.GetLoginUser()); IDictionary <ApplicationAttemptId, ApplicationAttemptReport> appAttempts = new LinkedHashMap <ApplicationAttemptId, ApplicationAttemptReport>(); foreach (TimelineEntity entity in entities.GetEntities()) { ApplicationAttemptReport appAttempt = ConvertToApplicationAttemptReport(entity); appAttempts[appAttempt.GetApplicationAttemptId()] = appAttempt; } return(appAttempts); }
// Returns up to `appsNum` application reports keyed by application id in
// insertion order; a request for long.MaxValue is capped at
// this.maxLoadedApplications. Entities that fail report generation are
// logged and skipped rather than failing the whole listing.
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> public virtual IDictionary <ApplicationId, ApplicationReport> GetApplications(long appsNum) { TimelineEntities entities = timelineDataManager.GetEntities(ApplicationMetricsConstants .EntityType, null, null, null, null, null, null, appsNum == long.MaxValue ? this .maxLoadedApplications : appsNum, EnumSet.AllOf <TimelineReader.Field>(), UserGroupInformation .GetLoginUser()); IDictionary <ApplicationId, ApplicationReport> apps = new LinkedHashMap <ApplicationId , ApplicationReport>(); if (entities != null && entities.GetEntities() != null) { foreach (TimelineEntity entity in entities.GetEntities()) { try { ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt app = GenerateApplicationReport (entity, ApplicationHistoryManagerOnTimelineStore.ApplicationReportField.All); apps[app.appReport.GetApplicationId()] = app.appReport; } catch (Exception e) { Log.Error("Error on generating application report for " + entity.GetEntityId(), e ); } } } return(apps); }
// Parses command-line flags (-dir, -seed, "-<name>" to include and
// "-no<name>" to exclude a codec/ctype/format/rw dimension), defaulting each
// unset dimension to the full enum set, then runs every read/write benchmark
// combination under the configured JobConf, printing per-run timings in
// seconds. Returns -1 (with usage) when -dir is missing, else 0.
// NOTE(review): the `!(fmt == txt || cod == pln)` guard routes "text format
// OR plain codec" runs through the no-CType branch (which also falls back to
// the codec's file extension when a read target is missing).
/// <summary>Process params from command line and run set of benchmarks specified.</summary> /// <exception cref="System.IO.IOException"/> public virtual int Run(string[] argv) { JobConf job = new JobConf(GetConf()); EnumSet <FileBench.CCodec> cc = null; EnumSet <FileBench.CType> ct = null; EnumSet <FileBench.Format> f = null; EnumSet <FileBench.RW> rw = null; Path root = null; FileSystem fs = FileSystem.Get(job); for (int i = 0; i < argv.Length; ++i) { try { if ("-dir".Equals(argv[i])) { root = new Path(argv[++i]).MakeQualified(fs); System.Console.Out.WriteLine("DIR: " + root.ToString()); } else { if ("-seed".Equals(argv[i])) { job.SetLong("filebench.seed", Sharpen.Extensions.ValueOf(argv[++i])); } else { if (argv[i].StartsWith("-no")) { string arg = Sharpen.Runtime.Substring(argv[i], 3); cc = Rem <FileBench.CCodec>(cc, arg); ct = Rem <FileBench.CType>(ct, arg); f = Rem <FileBench.Format>(f, arg); rw = Rem <FileBench.RW>(rw, arg); } else { string arg = Sharpen.Runtime.Substring(argv[i], 1); cc = Add <FileBench.CCodec>(cc, arg); ct = Add <FileBench.CType>(ct, arg); f = Add <FileBench.Format>(f, arg); rw = Add <FileBench.RW>(rw, arg); } } } } catch (Exception e) { throw (IOException)Sharpen.Extensions.InitCause(new IOException(), e); } } if (null == root) { System.Console.Out.WriteLine("Missing -dir param"); PrintUsage(); return(-1); } FillBlocks(job); job.SetOutputKeyClass(typeof(Org.Apache.Hadoop.IO.Text)); job.SetOutputValueClass(typeof(Org.Apache.Hadoop.IO.Text)); FileInputFormat.SetInputPaths(job, root); FileOutputFormat.SetOutputPath(job, root); if (null == cc) { cc = EnumSet.AllOf <FileBench.CCodec>(); } if (null == ct) { ct = EnumSet.AllOf <FileBench.CType>(); } if (null == f) { f = EnumSet.AllOf <FileBench.Format>(); } if (null == rw) { rw = EnumSet.AllOf <FileBench.RW>(); } foreach (FileBench.RW rwop in rw) { foreach (FileBench.Format fmt in f) { fmt.Configure(job); foreach (FileBench.CCodec cod in cc) { cod.Configure(job); if (!(fmt == 
FileBench.Format.txt || cod == FileBench.CCodec.pln)) { foreach (FileBench.CType typ in ct) { string fn = StringUtils.ToUpperCase(fmt.ToString()) + "_" + StringUtils.ToUpperCase (cod.ToString()) + "_" + StringUtils.ToUpperCase(typ.ToString()); typ.Configure(job); System.Console.Out.Write(StringUtils.ToUpperCase(rwop.ToString()) + " " + fn + ": " ); System.Console.Out.WriteLine(rwop.Exec(fn, job) / 1000 + " seconds"); } } else { string fn = StringUtils.ToUpperCase(fmt.ToString()) + "_" + StringUtils.ToUpperCase (cod.ToString()); Path p = new Path(root, fn); if (rwop == FileBench.RW.r && !fs.Exists(p)) { fn += cod.GetExt(); } System.Console.Out.Write(StringUtils.ToUpperCase(rwop.ToString()) + " " + fn + ": " ); System.Console.Out.WriteLine(rwop.Exec(fn, job) / 1000 + " seconds"); } } } } return(0); }
// Verifies that StartupProgress freezes once every phase is complete:
// records some real progress, forces all phases to COMPLETE, snapshots a
// view, then attempts further updates, counter increments, and a brand-new
// step, and asserts the post-completion view is unchanged (and the new
// LoadingEdits step never appears).
public virtual void TestFrozenAfterStartupCompletes() { // Do some updates and counter increments. startupProgress.BeginPhase(Phase.LoadingFsimage); startupProgress.SetFile(Phase.LoadingFsimage, "file1"); startupProgress.SetSize(Phase.LoadingFsimage, 1000L); Step step = new Step(StepType.Inodes); startupProgress.BeginStep(Phase.LoadingFsimage, step); startupProgress.SetTotal(Phase.LoadingFsimage, step, 10000L); StartupProgressTestHelper.IncrementCounter(startupProgress, Phase.LoadingFsimage, step, 100L); startupProgress.EndStep(Phase.LoadingFsimage, step); startupProgress.EndPhase(Phase.LoadingFsimage); // Force completion of phases, so that entire startup process is completed. foreach (Phase phase in EnumSet.AllOf <Phase>()) { if (startupProgress.GetStatus(phase) != Status.Complete) { startupProgress.BeginPhase(phase); startupProgress.EndPhase(phase); } } StartupProgressView before = startupProgress.CreateView(); // Attempt more updates and counter increments. startupProgress.BeginPhase(Phase.LoadingFsimage); startupProgress.SetFile(Phase.LoadingFsimage, "file2"); startupProgress.SetSize(Phase.LoadingFsimage, 2000L); startupProgress.BeginStep(Phase.LoadingFsimage, step); startupProgress.SetTotal(Phase.LoadingFsimage, step, 20000L); StartupProgressTestHelper.IncrementCounter(startupProgress, Phase.LoadingFsimage, step, 100L); startupProgress.EndStep(Phase.LoadingFsimage, step); startupProgress.EndPhase(Phase.LoadingFsimage); // Also attempt a whole new step that wasn't used last time. 
startupProgress.BeginPhase(Phase.LoadingEdits); Step newStep = new Step("file1"); startupProgress.BeginStep(Phase.LoadingEdits, newStep); StartupProgressTestHelper.IncrementCounter(startupProgress, Phase.LoadingEdits, newStep , 100L); startupProgress.EndStep(Phase.LoadingEdits, newStep); startupProgress.EndPhase(Phase.LoadingEdits); StartupProgressView after = startupProgress.CreateView(); // Expect that data was frozen after completion of entire startup process, so // second set of updates and counter increments should have had no effect. NUnit.Framework.Assert.AreEqual(before.GetCount(Phase.LoadingFsimage), after.GetCount (Phase.LoadingFsimage)); NUnit.Framework.Assert.AreEqual(before.GetCount(Phase.LoadingFsimage, step), after .GetCount(Phase.LoadingFsimage, step)); NUnit.Framework.Assert.AreEqual(before.GetElapsedTime(), after.GetElapsedTime()); NUnit.Framework.Assert.AreEqual(before.GetElapsedTime(Phase.LoadingFsimage), after .GetElapsedTime(Phase.LoadingFsimage)); NUnit.Framework.Assert.AreEqual(before.GetElapsedTime(Phase.LoadingFsimage, step) , after.GetElapsedTime(Phase.LoadingFsimage, step)); NUnit.Framework.Assert.AreEqual(before.GetFile(Phase.LoadingFsimage), after.GetFile (Phase.LoadingFsimage)); NUnit.Framework.Assert.AreEqual(before.GetSize(Phase.LoadingFsimage), after.GetSize (Phase.LoadingFsimage)); NUnit.Framework.Assert.AreEqual(before.GetTotal(Phase.LoadingFsimage), after.GetTotal (Phase.LoadingFsimage)); NUnit.Framework.Assert.AreEqual(before.GetTotal(Phase.LoadingFsimage, step), after .GetTotal(Phase.LoadingFsimage, step)); NUnit.Framework.Assert.IsFalse(after.GetSteps(Phase.LoadingEdits).GetEnumerator() .HasNext()); }
// Publishes app-attempt registered/finished events (with timestamps
// deliberately past int.MaxValue, exercising long timestamps) and polls the
// store until both arrive, then verifies the entity type/id, the
// parent-application primary filter, and the payloads of the registered
// (host, rpc port, master container) and finished (diagnostics, tracking
// URLs, final status, state) events.
/// <exception cref="System.Exception"/> public virtual void TestPublishAppAttemptMetrics() { ApplicationAttemptId appAttemptId = ApplicationAttemptId.NewInstance(ApplicationId .NewInstance(0, 1), 1); RMAppAttempt appAttempt = CreateRMAppAttempt(appAttemptId); metricsPublisher.AppAttemptRegistered(appAttempt, int.MaxValue + 1L); RMApp app = Org.Mockito.Mockito.Mock <RMApp>(); Org.Mockito.Mockito.When(app.GetFinalApplicationStatus()).ThenReturn(FinalApplicationStatus .Undefined); metricsPublisher.AppAttemptFinished(appAttempt, RMAppAttemptState.Finished, app, int.MaxValue + 2L); TimelineEntity entity = null; do { entity = store.GetEntity(appAttemptId.ToString(), AppAttemptMetricsConstants.EntityType , EnumSet.AllOf <TimelineReader.Field>()); }while (entity == null || entity.GetEvents().Count < 2); // ensure two events are both published before leaving the loop // verify all the fields NUnit.Framework.Assert.AreEqual(AppAttemptMetricsConstants.EntityType, entity.GetEntityType ()); NUnit.Framework.Assert.AreEqual(appAttemptId.ToString(), entity.GetEntityId()); NUnit.Framework.Assert.AreEqual(appAttemptId.GetApplicationId().ToString(), entity .GetPrimaryFilters()[AppAttemptMetricsConstants.ParentPrimaryFilter].GetEnumerator ().Next()); bool hasRegisteredEvent = false; bool hasFinishedEvent = false; foreach (TimelineEvent @event in entity.GetEvents()) { if (@event.GetEventType().Equals(AppAttemptMetricsConstants.RegisteredEventType)) { hasRegisteredEvent = true; NUnit.Framework.Assert.AreEqual(appAttempt.GetHost(), @event.GetEventInfo()[AppAttemptMetricsConstants .HostEventInfo]); NUnit.Framework.Assert.AreEqual(appAttempt.GetRpcPort(), @event.GetEventInfo()[AppAttemptMetricsConstants .RpcPortEventInfo]); NUnit.Framework.Assert.AreEqual(appAttempt.GetMasterContainer().GetId().ToString( ), @event.GetEventInfo()[AppAttemptMetricsConstants.MasterContainerEventInfo]); } else { if (@event.GetEventType().Equals(AppAttemptMetricsConstants.FinishedEventType)) { hasFinishedEvent 
= true; NUnit.Framework.Assert.AreEqual(appAttempt.GetDiagnostics(), @event.GetEventInfo( )[AppAttemptMetricsConstants.DiagnosticsInfoEventInfo]); NUnit.Framework.Assert.AreEqual(appAttempt.GetTrackingUrl(), @event.GetEventInfo( )[AppAttemptMetricsConstants.TrackingUrlEventInfo]); NUnit.Framework.Assert.AreEqual(appAttempt.GetOriginalTrackingUrl(), @event.GetEventInfo ()[AppAttemptMetricsConstants.OriginalTrackingUrlEventInfo]); NUnit.Framework.Assert.AreEqual(FinalApplicationStatus.Undefined.ToString(), @event .GetEventInfo()[AppAttemptMetricsConstants.FinalStatusEventInfo]); NUnit.Framework.Assert.AreEqual(YarnApplicationAttemptState.Finished.ToString(), @event.GetEventInfo()[AppAttemptMetricsConstants.StateEventInfo]); } } } NUnit.Framework.Assert.IsTrue(hasRegisteredEvent && hasFinishedEvent); }
// Returns all container reports under `appAttemptId`, keyed by container id
// in insertion order: enforces ACLs via the owning application, then queries
// container entities whose parent primary filter matches the attempt id (up
// to long.MaxValue entities, all fields) and converts each one.
/// <exception cref="Org.Apache.Hadoop.Yarn.Exceptions.YarnException"/> /// <exception cref="System.IO.IOException"/> public virtual IDictionary <ContainerId, ContainerReport> GetContainers(ApplicationAttemptId appAttemptId) { ApplicationHistoryManagerOnTimelineStore.ApplicationReportExt app = GetApplication (appAttemptId.GetApplicationId(), ApplicationHistoryManagerOnTimelineStore.ApplicationReportField .UserAndAcls); CheckAccess(app); TimelineEntities entities = timelineDataManager.GetEntities(ContainerMetricsConstants .EntityType, new NameValuePair(ContainerMetricsConstants.ParentPrimariyFilter, appAttemptId .ToString()), null, null, null, null, null, long.MaxValue, EnumSet.AllOf <TimelineReader.Field >(), UserGroupInformation.GetLoginUser()); IDictionary <ContainerId, ContainerReport> containers = new LinkedHashMap <ContainerId , ContainerReport>(); if (entities != null && entities.GetEntities() != null) { foreach (TimelineEntity entity in entities.GetEntities()) { ContainerReport container = ConvertToContainerReport(entity, serverHttpAddress, app .appReport.GetUser()); containers[container.GetContainerId()] = container; } } return(containers); }