/// <summary>
/// Verifies that the less-than operator returns false when the right operand is null.
/// </summary>
public void SmallerThanOperatorWithSecondObjectNull()
{
    var left = new DatasetId();
    DatasetId right = null;

    Assert.IsFalse(left < right);
}
/// <summary>
/// Verifies that AddChild creates a child dataset with the expected creation
/// information and wraps the new child in a script facade.
/// </summary>
public void AddChild()
{
    // Captured by the CreateNewChild callback so the creation information
    // passed by the facade can be inspected after the call.
    DatasetCreationInformation creationInformation = null;
    var storage = new Mock<IPersistenceInformation>();
    var child = CreateMockDataset();
    var dataset = new Mock<IProxyDataset>();
    {
        dataset.Setup(d => d.StoredAt)
            .Returns(storage.Object);
        dataset.Setup(d => d.CreateNewChild(It.IsAny<DatasetCreationInformation>()))
            .Callback<DatasetCreationInformation>(d => creationInformation = d)
            .Returns(child);
    }

    // Removed the unused local 'childId' that was never referenced.
    var facade = new DatasetFacadeForScripts(new DatasetFacade(dataset.Object));
    var childFacade = facade.AddChild();

    Assert.AreEqual(new DatasetFacadeForScripts(new DatasetFacade(child)), childFacade);
    Assert.IsFalse(creationInformation.CanBeAdopted);
    Assert.IsTrue(creationInformation.CanBecomeParent);
    Assert.IsTrue(creationInformation.CanBeCopied);
    Assert.IsTrue(creationInformation.CanBeDeleted);
    Assert.AreEqual(DatasetCreator.User, creationInformation.CreatedOnRequestOf);
    Assert.AreEqual(storage.Object, creationInformation.LoadFrom);
}
/// <summary>
/// Verifies that CompareTo returns zero for a clone of the instance.
/// </summary>
public void CompareToOperatorWithEqualObjects()
{
    var id = new DatasetId();
    object copy = id.Clone();

    Assert.AreEqual(0, id.CompareTo(copy));
}
/// <summary>
/// Verifies that CompareTo returns a negative value when the instance is smaller.
/// </summary>
public void CompareToWithSmallerFirstObject()
{
    var smaller = new DatasetId(1);
    var larger = new DatasetId(2);

    Assert.IsTrue(smaller.CompareTo(larger) < 0);
}
/// <summary>
/// Verifies that CompareTo orders any instance after a null reference.
/// </summary>
public void CompareToWithNullObject()
{
    var id = new DatasetId();
    object other = null;

    Assert.AreEqual(1, id.CompareTo(other));
}
/// <summary>
/// Verifies that CompareTo throws when given an object of an incompatible type.
/// </summary>
public void CompareToWithUnequalObjectTypes()
{
    var id = new DatasetId();
    var incompatible = new object();

    Assert.Throws<ArgumentException>(() => id.CompareTo(incompatible));
}
/// <summary>
/// Verifies that Equals returns false for an object of an incompatible type.
/// </summary>
public void EqualsWithUnequalObjectTypes()
{
    var id = new DatasetId();
    var incompatible = new object();

    Assert.IsFalse(id.Equals(incompatible));
}
/// <summary>
/// Verifies that the less-than operator returns false for equal identifiers.
/// </summary>
public void SmallerThanOperatorWithEqualObjects()
{
    var original = new DatasetId();
    var copy = original.Clone();

    Assert.IsFalse(original < copy);
}
/// <summary>
/// Verifies that the greater-than operator returns true when the right operand is null.
/// </summary>
public void LargerThanOperatorWithSecondObjectNull()
{
    var left = new DatasetId();
    DatasetId right = null;

    Assert.IsTrue(left > right);
}
/// <summary>
/// Verifies that Equals returns true for a clone of the instance.
/// </summary>
public void EqualsWithEqualObjects()
{
    var id = new DatasetId();
    object copy = id.Clone();

    Assert.IsTrue(id.Equals(copy));
}
/// <summary>
/// Verifies that the greater-than operator returns false when the left operand is null.
/// </summary>
public void LargerThanOperatorWithFirstObjectNull()
{
    DatasetId left = null;
    var right = new DatasetId();

    Assert.IsFalse(left > right);
}
/// <summary>
/// Builds a new <see cref="DatasetProxy"/> from the given creation information
/// and registers it with the timeline.
/// </summary>
/// <param name="newChild">The information describing the dataset that should be created.</param>
/// <returns>The newly created dataset proxy.</returns>
private DatasetProxy CreateNewDatasetProxy(DatasetCreationInformation newChild)
{
    var datasetId = new DatasetId();

    // When the proxy is removed, the dataset and all of its children are deleted;
    // the per-dataset callback passed here is deliberately a no-op.
    Action<DatasetId> onRemoval = i => DeleteDatasetAndChildren(i, d => { });

    var constructionParameters = new DatasetConstructionParameters
    {
        Id = datasetId,
        Owner = this,
        DistributionPlanGenerator = m_DatasetDistributor,
        CreatedOnRequestOf = newChild.CreatedOnRequestOf,
        CanBecomeParent = newChild.CanBecomeParent,
        CanBeAdopted = newChild.CanBeAdopted,
        CanBeCopied = newChild.CanBeCopied,
        CanBeDeleted = newChild.CanBeDeleted,
        IsRoot = newChild.IsRoot,
        LoadFrom = newChild.LoadFrom,
        OnRemoval = onRemoval,
    };

    return m_Timeline.AddToTimeline(
        DatasetProxy.CreateInstance,
        constructionParameters,
        m_DataStorageProxyBuilder,
        m_Notifications,
        m_Diagnostics);
}
/// <summary>
/// Verifies that the less-than operator returns true when the left operand is null.
/// </summary>
public void SmallerThanOperatorWithFirstObjectNull()
{
    DatasetId left = null;
    var right = new DatasetId();

    Assert.IsTrue(left < right);
}
/// <summary>
/// Verifies that the less-than operator returns true when the left operand is smaller.
/// </summary>
public void SmallerThanOperatorWithFirstObjectSmaller()
{
    var smaller = new DatasetId(1);
    var larger = new DatasetId(2);

    Assert.IsTrue(smaller < larger);
}
/// <summary>
/// Verifies that Equals returns false for a null reference.
/// </summary>
public void EqualsWithNullObject()
{
    var id = new DatasetId();
    object other = null;

    Assert.IsFalse(id.Equals(other));
}
/// <summary>
/// Verifies that Clone produces an identifier equal to the original.
/// </summary>
public void Clone()
{
    var original = new DatasetId();
    var copy = original.Clone();

    Assert.AreEqual(original, copy);
}
/// <summary>
/// Verifies that AddChild creates a child dataset with the expected creation
/// information and wraps the new child in a script facade.
/// </summary>
public void AddChild()
{
    // Captured by the CreateNewChild callback so the creation information
    // passed by the facade can be inspected after the call.
    DatasetCreationInformation creationInformation = null;
    var storage = new Mock<IPersistenceInformation>();
    var child = CreateMockDataset();
    var dataset = new Mock<IProxyDataset>();
    {
        dataset.Setup(d => d.StoredAt)
            .Returns(storage.Object);
        dataset.Setup(d => d.CreateNewChild(It.IsAny<DatasetCreationInformation>()))
            .Callback<DatasetCreationInformation>(d => creationInformation = d)
            .Returns(child);
    }

    // Removed the unused local 'childId' that was never referenced.
    var facade = new DatasetFacadeForScripts(new DatasetFacade(dataset.Object));
    var childFacade = facade.AddChild();

    Assert.AreEqual(new DatasetFacadeForScripts(new DatasetFacade(child)), childFacade);
    Assert.IsFalse(creationInformation.CanBeAdopted);
    Assert.IsTrue(creationInformation.CanBecomeParent);
    Assert.IsTrue(creationInformation.CanBeCopied);
    Assert.IsTrue(creationInformation.CanBeDeleted);
    Assert.AreEqual(DatasetCreator.User, creationInformation.CreatedOnRequestOf);
    Assert.AreEqual(storage.Object, creationInformation.LoadFrom);
}
/// <summary>
/// Verifies that the greater-than operator returns false when the left operand is smaller.
/// </summary>
public void LargerThanOperatorWithFirstObjectSmaller()
{
    var smaller = new DatasetId(1);
    var larger = new DatasetId(2);

    Assert.IsFalse(smaller > larger);
}
/// <summary>
/// Verifies that the less-than operator returns false when both operands are null.
/// </summary>
public void SmallerThanOperatorWithBothObjectsNull()
{
    DatasetId left = null;
    DatasetId right = null;

    Assert.IsFalse(left < right);
}
/// <summary>
/// Initializes a new instance of the <see cref="Project"/> class.
/// </summary>
/// <param name="timeline">The timeline for the current project.</param>
/// <param name="distributor">
/// The function which returns a <see cref="DistributionPlan"/> for a given
/// <see cref="DatasetActivationRequest"/>.
/// </param>
/// <param name="dataStorageProxyBuilder">The function which returns a storage proxy for a newly loaded dataset.</param>
/// <param name="notifications">The object that stores the notifications for the user interface.</param>
/// <param name="diagnostics">The object that provides the diagnostics methods for the application.</param>
/// <param name="persistenceInfo">
/// The object that describes how the project was persisted. May be <see langword="null" />,
/// in which case a fresh project with a new root dataset is created.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown if <paramref name="timeline"/> is <see langword="null" />.
/// </exception>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="distributor"/> is <see langword="null" />.
/// </exception>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="dataStorageProxyBuilder"/> is <see langword="null" />.
/// </exception>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="notifications"/> is <see langword="null" />.
/// </exception>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="diagnostics"/> is <see langword="null" />.
/// </exception>
public Project(
    ITimeline timeline,
    Func <DatasetActivationRequest, CancellationToken, IEnumerable <DistributionPlan> > distributor,
    Func <DatasetOnlineInformation, DatasetStorageProxy> dataStorageProxyBuilder,
    ICollectNotifications notifications,
    SystemDiagnostics diagnostics,
    IPersistenceInformation persistenceInfo)
{
    {
        Lokad.Enforce.Argument(() => timeline);
        Lokad.Enforce.Argument(() => distributor);
        Lokad.Enforce.Argument(() => dataStorageProxyBuilder);
        Lokad.Enforce.Argument(() => notifications);
        Lokad.Enforce.Argument(() => diagnostics);
    }

    m_Timeline = timeline;

    // Start from a clean slate and subscribe to timeline navigation events
    // before any timeline entries are created below.
    m_Timeline.ForgetAllHistory();

    m_Timeline.OnRolledBack += OnTimelineRolledBack;
    m_Timeline.OnRolledForward += OnTimelineRolledForward;

    m_ProjectInformation = m_Timeline.AddToTimeline(ProjectHistoryStorage.CreateInstance);
    m_Datasets = m_Timeline.AddToTimeline(DatasetHistoryStorage.CreateInstance);

    m_DatasetDistributor = distributor;
    m_DataStorageProxyBuilder = dataStorageProxyBuilder;
    m_Notifications = notifications;
    m_Diagnostics = diagnostics;

    // persistenceInfo is optional; restoring may populate m_RootDataset.
    if (persistenceInfo != null)
    {
        RestoreFromStore(persistenceInfo);
    }

    // Create a root dataset if there isn't one
    if (m_RootDataset == null)
    {
        var dataset = CreateDataset(
            null,
            new DatasetCreationInformation
            {
                CreatedOnRequestOf = DatasetCreator.System,
                LoadFrom = new NullPersistenceInformation(),
                CanBeDeleted = false,
                CanBeCopied = false,
                CanBecomeParent = true,
                CanBeAdopted = false,
                IsRoot = true,
            });

        m_RootDataset = dataset.Id;
        dataset.Name = Resources.Projects_Dataset_RootDatasetName;
        dataset.Summary = Resources.Projects_Dataset_RootDatasetSummary;
    }

    // Mark the fully-initialized state as the default point in the timeline.
    m_Timeline.SetCurrentAsDefault();
}
/// <summary>
/// Serves as the default hash function.
/// </summary>
/// <returns>A hash code derived from the dataset identifier.</returns>
public override int GetHashCode()
{
    // Equivalent to the 17/23 prime-multiplier pattern with a single field:
    // 17 * 23 + DatasetId hash.
    const int seed = 17;
    const int multiplier = 23;
    return (seed * multiplier) + DatasetId.GetHashCode();
}
/// <summary>
/// Deletes the given dataset and all its children.
/// </summary>
/// <remarks>
/// The dataset and all its children will be deleted. If any of the datasets are
/// loaded onto a (remote) machine they will be unloaded just before being deleted.
/// No data will be saved.
/// </remarks>
/// <param name="dataset">The dataset that should be deleted.</param>
/// <exception cref="CannotDeleteDatasetException">
/// Thrown when the dataset or one of its children cannot be deleted. The exception
/// is thrown before any of the datasets are deleted.
/// </exception>
public void DeleteDatasetAndChildren(DatasetId dataset)
{
    {
        // Fixed copy-paste in the assert message: this method deletes a dataset,
        // it does not create one.
        Debug.Assert(!IsClosed, "The project should not be closed if we want to delete a dataset.");
    }

    // Each deleted dataset is also removed from the timeline history.
    DeleteDatasetAndChildren(dataset, d => m_Timeline.RemoveFromTimeline(d.HistoryId));
    RaiseOnDatasetDeleted();
}
/// <summary>
/// Calculates a hash code for the sequence.
/// </summary>
/// <returns>A hash code combining the sequence, MSn feature and dataset identifiers.</returns>
public override int GetHashCode()
{
    // Standard 17/23 prime-multiplier hash over the three identifying fields.
    var hash = 17;
    hash = hash * 23 + SequenceId.GetHashCode();
    hash = hash * 23 + MsnFeatureId.GetHashCode();
    hash = hash * 23 + DatasetId.GetHashCode();
    return (hash);
}
/// <summary>
/// Returns the dataset with the given ID.
/// </summary>
/// <param name="id">The ID of the dataset.</param>
/// <returns>The dataset with the given ID if it exists; otherwise, <see langword="null" />.</returns>
public IProxyDataset Dataset(DatasetId id)
{
    // Single dictionary lookup via TryGetValue instead of ContainsKey + indexer
    // (double lookup); 'result' is null when the key is absent.
    IProxyDataset result;
    m_Datasets.Datasets.TryGetValue(id, out result);
    return result;
}
/// <summary>
/// Creates a new dataset as child of the given parent dataset.
/// </summary>
/// <param name="parent">The parent. May be <see langword="null" /> for a root-level dataset.</param>
/// <param name="newChild">The information required to create the new child.</param>
/// <returns>The new child.</returns>
public IProxyDataset CreateDataset(DatasetId parent, DatasetCreationInformation newChild)
{
    {
        Debug.Assert(!IsClosed, "The project should not be closed if we want to create a new dataset.");
        Debug.Assert(
            (parent == null) || ((parent != null) && m_Datasets.Datasets.ContainsKey(parent)),
            "The provided parent node does not exist.");
        Debug.Assert(
            (parent == null) || ((parent != null) && m_Datasets.Datasets[parent].CanBecomeParent),
            "The given parent is not allowed to have children.");
    }

    m_Diagnostics.Log(
        LevelToLog.Trace,
        HostConstants.LogPrefix,
        string.Format(
            CultureInfo.InvariantCulture,
            Resources.Project_LogMessage_CreatingDataset_WithInformation,
            parent));

    var dataset = CreateNewDatasetProxy(newChild);

    // When adding a new dataset there is no way we can create cycles because
    // we can only add new children to parents, there is no way to link an
    // existing node to the parent.
    lock (m_Lock)
    {
        m_Datasets.Datasets.Add(dataset.Id, dataset);
        m_Datasets.Graph.AddVertex(dataset.Id);
    }

    if (parent != null)
    {
        // Find the actual ID object that we have stored, the caller may have a copy
        // of ID. Using a copy of the real ID might cause issues when connecting the
        // graph so we only use the ID numbers that we have stored.
        var realParent = m_Datasets.Datasets[parent].Id;

        // Taken separately from the lock above; the vertex already exists by now.
        lock (m_Lock)
        {
            m_Datasets.Graph.AddEdge(new Edge <DatasetId>(realParent, dataset.Id));
        }
    }

    RaiseOnDatasetCreated();
    return (dataset);
}
/// <summary>
/// Computes the hash by XOR-ing the hash codes of every field that is set.
/// </summary>
// NOTE(review): this follows the shape of protobuf-generated GetHashCode methods —
// if this file is generated, prefer regenerating over hand-editing.
public override int GetHashCode()
{
    int hash = 1;
    if (Id.Length != 0)
    {
        hash ^= Id.GetHashCode();
    }
    if (Consumer.Length != 0)
    {
        hash ^= Consumer.GetHashCode();
    }
    if (DatasetName.Length != 0)
    {
        hash ^= DatasetName.GetHashCode();
    }
    if (DatasetType.Length != 0)
    {
        hash ^= DatasetType.GetHashCode();
    }
    if (DatasetId.Length != 0)
    {
        hash ^= DatasetId.GetHashCode();
    }
    // Repeated/map fields and message fields are folded in unconditionally or
    // when non-null, respectively.
    hash ^= tags_.GetHashCode();
    if (data_ != null)
    {
        hash ^= Data.GetHashCode();
    }
    hash ^= targetValues_.GetHashCode();
    if (model_ != null)
    {
        hash ^= Model.GetHashCode();
    }
    hash ^= prediction_.GetHashCode();
    if (TimeAdded != 0UL)
    {
        hash ^= TimeAdded.GetHashCode();
    }
    if (_unknownFields != null)
    {
        hash ^= _unknownFields.GetHashCode();
    }
    return (hash);
}
/// <summary>
/// Exports the given dataset as the base of a new project.
/// </summary>
/// <param name="datasetToExport">
/// The ID number of the dataset that should be exported.
/// </param>
/// <param name="shouldIncludeChildren">
/// Indicates if all the child datasets of <paramref name="datasetToExport"/> should be included in the
/// export or not.
/// </param>
/// <param name="persistenceInfo">
/// The object that describes how the dataset should be exported.
/// </param>
/// <remarks>
/// Note that saving project and dataset information to a stream on the local machine may take
/// some time because the datasets may be large, reside on a remote machine or both.
/// </remarks>
/// <exception cref="NotImplementedException">
/// Always thrown: the export itself is not implemented yet; only argument
/// validation is in place.
/// </exception>
public void Export(DatasetId datasetToExport, bool shouldIncludeChildren, IPersistenceInformation persistenceInfo)
{
    {
        // Validation order is deliberate: closed-project check first, then the
        // dataset arguments, so callers see a consistent exception precedence.
        Lokad.Enforce.With <CannotUseProjectAfterClosingItException>(
            !IsClosed,
            Resources.Exceptions_Messages_CannotUseProjectAfterClosingIt);
        Lokad.Enforce.Argument(() => datasetToExport);
        Lokad.Enforce.With <UnknownDatasetException>(
            m_Datasets.Datasets.ContainsKey(datasetToExport),
            Resources.Exceptions_Messages_UnknownDataset_WithId,
            datasetToExport);
        Lokad.Enforce.Argument(() => persistenceInfo);
    }

    // Do we need to have a save flag that we can set to prevent closing from happening
    // while saving?
    throw new NotImplementedException();
}
/// <summary>
/// Returns the collection of children for a given dataset.
/// </summary>
/// <param name="parent">The ID number of the parent dataset.</param>
/// <returns>The collection of child datasets.</returns>
public IEnumerable <DatasetProxy> Children(DatasetId parent)
{
    {
        Debug.Assert(!IsClosed, "The project should not be closed if we want to get the children of a dataset.");
    }

    // Snapshot the outgoing edges under the lock; the projection below is
    // evaluated lazily outside of it.
    List <Edge <DatasetId> > edges;
    lock (m_Lock)
    {
        edges = m_Datasets.Graph.OutEdges(parent).ToList();
    }

    return edges.Select(edge => m_Datasets.Datasets[edge.Target]);
}
/// <summary>
/// Computes the hash by XOR-ing the hash codes of every field that is set,
/// including the active oneof model-metadata case.
/// </summary>
// NOTE(review): this follows the shape of protobuf-generated GetHashCode methods —
// if this file is generated, prefer regenerating over hand-editing.
public override int GetHashCode()
{
    int hash = 1;
    if (modelMetadataCase_ == ModelMetadataOneofCase.TranslationModelMetadata)
    {
        hash ^= TranslationModelMetadata.GetHashCode();
    }
    if (Name.Length != 0)
    {
        hash ^= Name.GetHashCode();
    }
    if (DisplayName.Length != 0)
    {
        hash ^= DisplayName.GetHashCode();
    }
    if (DatasetId.Length != 0)
    {
        hash ^= DatasetId.GetHashCode();
    }
    if (createTime_ != null)
    {
        hash ^= CreateTime.GetHashCode();
    }
    if (updateTime_ != null)
    {
        hash ^= UpdateTime.GetHashCode();
    }
    if (DeploymentState != 0)
    {
        hash ^= DeploymentState.GetHashCode();
    }
    hash ^= Labels.GetHashCode();
    // The oneof discriminator itself participates in the hash.
    hash ^= (int)modelMetadataCase_;
    if (_unknownFields != null)
    {
        hash ^= _unknownFields.GetHashCode();
    }
    return (hash);
}
/// <summary>
/// Computes the hash by XOR-ing the hash codes of the project, dataset and
/// table identifiers that are set.
/// </summary>
// NOTE(review): this follows the shape of protobuf-generated GetHashCode methods —
// if this file is generated, prefer regenerating over hand-editing.
public override int GetHashCode()
{
    int hash = 1;
    if (ProjectId.Length != 0)
    {
        hash ^= ProjectId.GetHashCode();
    }
    if (DatasetId.Length != 0)
    {
        hash ^= DatasetId.GetHashCode();
    }
    if (TableId.Length != 0)
    {
        hash ^= TableId.GetHashCode();
    }
    if (_unknownFields != null)
    {
        hash ^= _unknownFields.GetHashCode();
    }
    return (hash);
}
/// <summary>
/// Builds a new <see cref="DatasetProxy"/> from the given creation information
/// and registers it with the timeline.
/// </summary>
/// <param name="newChild">The information describing the dataset that should be created.</param>
/// <returns>The newly created dataset proxy.</returns>
private DatasetProxy CreateNewDatasetProxy(DatasetCreationInformation newChild)
{
    var datasetId = new DatasetId();

    // When the proxy is removed, the dataset and all of its children are deleted;
    // the per-dataset callback passed here is deliberately a no-op.
    Action<DatasetId> onRemoval = i => DeleteDatasetAndChildren(i, d => { });

    var constructionParameters = new DatasetConstructionParameters
    {
        Id = datasetId,
        Owner = this,
        DistributionPlanGenerator = m_DatasetDistributor,
        CreatedOnRequestOf = newChild.CreatedOnRequestOf,
        CanBecomeParent = newChild.CanBecomeParent,
        CanBeAdopted = newChild.CanBeAdopted,
        CanBeCopied = newChild.CanBeCopied,
        CanBeDeleted = newChild.CanBeDeleted,
        IsRoot = newChild.IsRoot,
        LoadFrom = newChild.LoadFrom,
        OnRemoval = onRemoval,
    };

    return m_Timeline.AddToTimeline(
        DatasetProxy.CreateInstance,
        constructionParameters,
        m_DataStorageProxyBuilder,
        m_Notifications,
        m_Diagnostics);
}
/// <summary>
/// Exports the given dataset as the base of a new project.
/// </summary>
/// <param name="datasetToExport">
/// The ID number of the dataset that should be exported.
/// </param>
/// <param name="shouldIncludeChildren">
/// Indicates if all the child datasets of <paramref name="datasetToExport"/> should be included in the
/// export or not.
/// </param>
/// <param name="persistenceInfo">
/// The object that describes how the dataset should be exported.
/// </param>
/// <remarks>
/// Note that saving project and dataset information to a stream on the local machine may take
/// some time because the datasets may be large, reside on a remote machine or both.
/// </remarks>
/// <exception cref="NotImplementedException">
/// Always thrown: the export itself is not implemented yet; only argument
/// validation is in place.
/// </exception>
public void Export(DatasetId datasetToExport, bool shouldIncludeChildren, IPersistenceInformation persistenceInfo)
{
    {
        // Validation order is deliberate: closed-project check first, then the
        // dataset arguments, so callers see a consistent exception precedence.
        Lokad.Enforce.With<CannotUseProjectAfterClosingItException>(
            !IsClosed,
            Resources.Exceptions_Messages_CannotUseProjectAfterClosingIt);
        Lokad.Enforce.Argument(() => datasetToExport);
        Lokad.Enforce.With<UnknownDatasetException>(
            m_Datasets.Datasets.ContainsKey(datasetToExport),
            Resources.Exceptions_Messages_UnknownDataset_WithId,
            datasetToExport);
        Lokad.Enforce.Argument(() => persistenceInfo);
    }

    // Do we need to have a save flag that we can set to prevent closing from happening
    // while saving?
    throw new NotImplementedException();
}
/// <summary>
/// Returns the dataset with the given ID.
/// </summary>
/// <param name="id">The ID of the dataset.</param>
/// <returns>The dataset with the given ID if it exists; otherwise, <see langword="null" />.</returns>
public IProxyDataset Dataset(DatasetId id)
{
    // Single dictionary lookup via TryGetValue instead of ContainsKey + indexer
    // (double lookup); 'result' is null when the key is absent.
    IProxyDataset result;
    m_Datasets.Datasets.TryGetValue(id, out result);
    return result;
}
/// <summary>
/// Creates a new dataset as child of the given parent dataset.
/// </summary>
/// <param name="parent">The parent. May be <see langword="null" /> for a root-level dataset.</param>
/// <param name="newChild">The information required to create the new child.</param>
/// <returns>The new child.</returns>
public IProxyDataset CreateDataset(DatasetId parent, DatasetCreationInformation newChild)
{
    {
        Debug.Assert(!IsClosed, "The project should not be closed if we want to create a new dataset.");
        Debug.Assert(
            (parent == null) || ((parent != null) && m_Datasets.Datasets.ContainsKey(parent)),
            "The provided parent node does not exist.");
        Debug.Assert(
            (parent == null) || ((parent != null) && m_Datasets.Datasets[parent].CanBecomeParent),
            "The given parent is not allowed to have children.");
    }

    m_Diagnostics.Log(
        LevelToLog.Trace,
        HostConstants.LogPrefix,
        string.Format(
            CultureInfo.InvariantCulture,
            Resources.Project_LogMessage_CreatingDataset_WithInformation,
            parent));

    var dataset = CreateNewDatasetProxy(newChild);

    // When adding a new dataset there is no way we can create cycles because
    // we can only add new children to parents, there is no way to link an
    // existing node to the parent.
    lock (m_Lock)
    {
        m_Datasets.Datasets.Add(dataset.Id, dataset);
        m_Datasets.Graph.AddVertex(dataset.Id);
    }

    if (parent != null)
    {
        // Find the actual ID object that we have stored, the caller may have a copy
        // of ID. Using a copy of the real ID might cause issues when connecting the
        // graph so we only use the ID numbers that we have stored.
        var realParent = m_Datasets.Datasets[parent].Id;

        // Taken separately from the lock above; the vertex already exists by now.
        lock (m_Lock)
        {
            m_Datasets.Graph.AddEdge(new Edge<DatasetId>(realParent, dataset.Id));
        }
    }

    RaiseOnDatasetCreated();
    return dataset;
}
/// <summary>
/// Returns the collection of children for a given dataset.
/// </summary>
/// <param name="parent">The ID number of the parent dataset.</param>
/// <returns>The collection of child datasets.</returns>
public IEnumerable<DatasetProxy> Children(DatasetId parent)
{
    {
        Debug.Assert(!IsClosed, "The project should not be closed if we want to get the children of a dataset.");
    }

    // Snapshot the outgoing edges under the lock; the projection below is
    // evaluated lazily outside of it.
    List<Edge<DatasetId>> edges;
    lock (m_Lock)
    {
        edges = m_Datasets.Graph.OutEdges(parent).ToList();
    }

    return edges.Select(edge => m_Datasets.Datasets[edge.Target]);
}
/// <summary>
/// Initializes a new instance of the <see cref="Project"/> class.
/// </summary>
/// <param name="timeline">The timeline for the current project.</param>
/// <param name="distributor">
/// The function which returns a <see cref="DistributionPlan"/> for a given
/// <see cref="DatasetActivationRequest"/>.
/// </param>
/// <param name="dataStorageProxyBuilder">The function which returns a storage proxy for a newly loaded dataset.</param>
/// <param name="notifications">The object that stores the notifications for the user interface.</param>
/// <param name="diagnostics">The object that provides the diagnostics methods for the application.</param>
/// <param name="persistenceInfo">
/// The object that describes how the project was persisted. May be <see langword="null" />,
/// in which case a fresh project with a new root dataset is created.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown if <paramref name="timeline"/> is <see langword="null" />.
/// </exception>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="distributor"/> is <see langword="null" />.
/// </exception>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="dataStorageProxyBuilder"/> is <see langword="null" />.
/// </exception>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="notifications"/> is <see langword="null" />.
/// </exception>
/// <exception cref="ArgumentNullException">
/// Thrown when <paramref name="diagnostics"/> is <see langword="null" />.
/// </exception>
public Project(
    ITimeline timeline,
    Func<DatasetActivationRequest, CancellationToken, IEnumerable<DistributionPlan>> distributor,
    Func<DatasetOnlineInformation, DatasetStorageProxy> dataStorageProxyBuilder,
    ICollectNotifications notifications,
    SystemDiagnostics diagnostics,
    IPersistenceInformation persistenceInfo)
{
    {
        Lokad.Enforce.Argument(() => timeline);
        Lokad.Enforce.Argument(() => distributor);
        Lokad.Enforce.Argument(() => dataStorageProxyBuilder);
        Lokad.Enforce.Argument(() => notifications);
        Lokad.Enforce.Argument(() => diagnostics);
    }

    m_Timeline = timeline;

    // Start from a clean slate and subscribe to timeline navigation events
    // before any timeline entries are created below.
    m_Timeline.ForgetAllHistory();

    m_Timeline.OnRolledBack += OnTimelineRolledBack;
    m_Timeline.OnRolledForward += OnTimelineRolledForward;

    m_ProjectInformation = m_Timeline.AddToTimeline(ProjectHistoryStorage.CreateInstance);
    m_Datasets = m_Timeline.AddToTimeline(DatasetHistoryStorage.CreateInstance);

    m_DatasetDistributor = distributor;
    m_DataStorageProxyBuilder = dataStorageProxyBuilder;
    m_Notifications = notifications;
    m_Diagnostics = diagnostics;

    // persistenceInfo is optional; restoring may populate m_RootDataset.
    if (persistenceInfo != null)
    {
        RestoreFromStore(persistenceInfo);
    }

    // Create a root dataset if there isn't one
    if (m_RootDataset == null)
    {
        var dataset = CreateDataset(
            null,
            new DatasetCreationInformation
            {
                CreatedOnRequestOf = DatasetCreator.System,
                LoadFrom = new NullPersistenceInformation(),
                CanBeDeleted = false,
                CanBeCopied = false,
                CanBecomeParent = true,
                CanBeAdopted = false,
                IsRoot = true,
            });

        m_RootDataset = dataset.Id;
        dataset.Name = Resources.Projects_Dataset_RootDatasetName;
        dataset.Summary = Resources.Projects_Dataset_RootDatasetSummary;
    }

    // Mark the fully-initialized state as the default point in the timeline.
    m_Timeline.SetCurrentAsDefault();
}
/// <summary>
/// Verifies that a newly constructed DatasetOnlineInformation exposes the
/// identifier, endpoint and machine it was created with.
/// </summary>
public void Create()
{
    var commandHub = new Mock<ISendCommandsToRemoteEndpoints>();
    var datasetId = new DatasetId();
    var endpoint = EndpointIdExtensions.CreateEndpointIdForCurrentProcess();
    var machine = NetworkIdentifier.ForLocalMachine();
    var diagnostics = new SystemDiagnostics((p, s) => { }, null);

    var notifications = new Mock<IDatasetApplicationNotifications>();
    var notificationHub = new Mock<INotifyOfRemoteEndpointEvents>();
    {
        notificationHub.Setup(n => n.HasNotificationsFor(It.IsAny<EndpointId>()))
            .Returns(true);
        notificationHub.Setup(n => n.NotificationsFor<IDatasetApplicationNotifications>(It.IsAny<EndpointId>()))
            .Callback<EndpointId>(e => Assert.AreSame(endpoint, e))
            .Returns(notifications.Object);
    }

    var info = new DatasetOnlineInformation(
        datasetId,
        endpoint,
        machine,
        commandHub.Object,
        notificationHub.Object,
        diagnostics);

    Assert.AreSame(datasetId, info.Id);
    Assert.AreSame(endpoint, info.Endpoint);
    Assert.AreSame(machine, info.RunsOn);
}
/// <summary>
/// Verifies that Notification resolves the requested notification set through
/// the notification hub.
/// </summary>
public void Notification()
{
    var commandHub = new Mock<ISendCommandsToRemoteEndpoints>();
    var datasetId = new DatasetId();
    var endpoint = EndpointIdExtensions.CreateEndpointIdForCurrentProcess();
    var machine = NetworkIdentifier.ForLocalMachine();
    var diagnostics = new SystemDiagnostics((p, s) => { }, null);

    var knownNotifications = new SortedList<Type, INotificationSet>
        {
            {
                typeof(IMockNotificationSetWithTypedEventHandler),
                new Mock<IMockNotificationSetWithTypedEventHandler>().Object
            },
        };

    var datasetNotifications = new Mock<IDatasetApplicationNotifications>();
    var notificationHub = new Mock<INotifyOfRemoteEndpointEvents>();
    {
        notificationHub.Setup(
                h => h.NotificationsFor<IMockNotificationSetWithTypedEventHandler>(It.IsAny<EndpointId>()))
            .Returns((IMockNotificationSetWithTypedEventHandler)knownNotifications.Values[0]);
        notificationHub.Setup(n => n.NotificationsFor<IDatasetApplicationNotifications>(It.IsAny<EndpointId>()))
            .Callback<EndpointId>(e => Assert.AreSame(endpoint, e))
            .Returns(datasetNotifications.Object);
    }

    var info = new DatasetOnlineInformation(
        datasetId,
        endpoint,
        machine,
        commandHub.Object,
        notificationHub.Object,
        diagnostics);

    var resolved = info.Notification<IMockNotificationSetWithTypedEventHandler>();
    Assert.AreSame(knownNotifications.Values[0], resolved);
}
/// <summary>
/// Initializes a new instance of the <see cref="DatasetCannotBecomeParentException"/> class.
/// </summary>
/// <param name="id">The ID number of the dataset that could not be made a parent.</param>
public DatasetCannotBecomeParentException(DatasetId id)
    // Delegates to the string-message overload with a culture-invariant,
    // resource-based message containing the offending dataset ID.
    : this(string.Format(CultureInfo.InvariantCulture, Resources.Exceptions_Messages_DatasetCannotBecomeParent_WithId, id))
{
}
/// <summary>
/// Deletes the given dataset and all of its children, invoking
/// <paramref name="onRemoval"/> for each dataset just before it is removed.
/// </summary>
/// <param name="dataset">The dataset that should be deleted. Unknown IDs are ignored.</param>
/// <param name="onRemoval">
/// Invoked for every dataset that is removed; may be <see langword="null" />.
/// </param>
private void DeleteDatasetAndChildren(DatasetId dataset, Action<DatasetProxy> onRemoval)
{
    if (!m_Datasets.Datasets.ContainsKey(dataset))
    {
        return;
    }

    m_Diagnostics.Log(
        LevelToLog.Info,
        HostConstants.LogPrefix,
        string.Format(
            CultureInfo.InvariantCulture,
            Resources.Project_LogMessage_DeletingDatasetAndChildren_WithInformation,
            dataset));

    // Get all the datasets that need to be deleted
    // make sure we do this in an ordered way. We need to
    // remove the children before we can remove a parent.
    var datasetsToDelete = new Stack<DatasetId>();
    datasetsToDelete.Push(dataset);

    var nodesToProcess = new Queue<DatasetId>();
    nodesToProcess.Enqueue(dataset);

    // Breadth-first traversal of the subtree. Pushing onto the stack during
    // the traversal means children are popped (and deleted) before parents.
    // A non-deletable dataset aborts the whole operation before anything is removed.
    while (nodesToProcess.Count > 0)
    {
        var node = nodesToProcess.Dequeue();
        Debug.Assert(m_Datasets.Datasets.ContainsKey(node), "The dataset was in the graph but not in the collection.");
        if (!m_Datasets.Datasets[node].CanBeDeleted)
        {
            throw new CannotDeleteDatasetException();
        }

        var children = Children(node);
        foreach (var child in children)
        {
            nodesToProcess.Enqueue(child.Id);
            datasetsToDelete.Push(child.Id);
        }
    }

    while (datasetsToDelete.Count > 0)
    {
        var datasetToDelete = datasetsToDelete.Pop();
        var datasetObject = m_Datasets.Datasets[datasetToDelete];

        // Unload activated datasets before removing them.
        if (datasetObject.IsActivated)
        {
            CloseOnlineDataset(datasetObject);
        }

        if (onRemoval != null)
        {
            onRemoval(datasetObject);
        }

        lock (m_Lock)
        {
            m_Datasets.Graph.RemoveVertex(datasetToDelete);
            m_Datasets.Datasets.Remove(datasetToDelete);
        }

        m_Diagnostics.Log(
            LevelToLog.Trace,
            HostConstants.LogPrefix,
            string.Format(
                CultureInfo.InvariantCulture,
                Resources.Project_LogMessage_DeletedDataset_WithId,
                datasetToDelete));
    }
}
/// <summary>
/// Verifies that Close forwards the close request to the dataset application
/// commands exactly once.
/// </summary>
public void Close()
{
    var datasetId = new DatasetId();
    var endpoint = EndpointIdExtensions.CreateEndpointIdForCurrentProcess();
    var machine = NetworkIdentifier.ForLocalMachine();
    var diagnostics = new SystemDiagnostics((p, s) => { }, null);

    var datasetCommands = new Mock<IDatasetApplicationCommands>();
    {
        datasetCommands.Setup(d => d.Close())
            .Returns(
                Task.Factory.StartNew(
                    () => { },
                    new CancellationToken(),
                    TaskCreationOptions.None,
                    new CurrentThreadTaskScheduler()))
            .Verifiable();
    }

    var commandHub = new Mock<ISendCommandsToRemoteEndpoints>();
    {
        commandHub.Setup(h => h.HasCommandFor(It.IsAny<EndpointId>(), It.IsAny<Type>()))
            .Returns(true);
        commandHub.Setup(h => h.CommandsFor<IDatasetApplicationCommands>(It.IsAny<EndpointId>()))
            .Returns(datasetCommands.Object);
    }

    var notifications = new Mock<IDatasetApplicationNotifications>();
    var notificationHub = new Mock<INotifyOfRemoteEndpointEvents>();
    {
        notificationHub.Setup(n => n.HasNotificationsFor(It.IsAny<EndpointId>()))
            .Returns(true);
        notificationHub.Setup(n => n.NotificationsFor<IDatasetApplicationNotifications>(It.IsAny<EndpointId>()))
            .Callback<EndpointId>(e => Assert.AreSame(endpoint, e))
            .Returns(notifications.Object);
    }

    var info = new DatasetOnlineInformation(
        datasetId,
        endpoint,
        machine,
        commandHub.Object,
        notificationHub.Object,
        diagnostics);

    info.Close();
    datasetCommands.Verify(d => d.Close(), Times.Once());
}
/// <summary>
/// Computes the hash by XOR-ing the hash codes of every field that is set,
/// including whichever oneof model-metadata case is active.
/// </summary>
// NOTE(review): this follows the shape of protobuf-generated GetHashCode methods —
// if this file is generated, prefer regenerating over hand-editing.
public override int GetHashCode()
{
    int hash = 1;
    // Only the active oneof case contributes to the hash.
    if (modelMetadataCase_ == ModelMetadataOneofCase.TranslationModelMetadata)
    {
        hash ^= TranslationModelMetadata.GetHashCode();
    }
    if (modelMetadataCase_ == ModelMetadataOneofCase.ImageClassificationModelMetadata)
    {
        hash ^= ImageClassificationModelMetadata.GetHashCode();
    }
    if (modelMetadataCase_ == ModelMetadataOneofCase.TextClassificationModelMetadata)
    {
        hash ^= TextClassificationModelMetadata.GetHashCode();
    }
    if (modelMetadataCase_ == ModelMetadataOneofCase.ImageObjectDetectionModelMetadata)
    {
        hash ^= ImageObjectDetectionModelMetadata.GetHashCode();
    }
    if (modelMetadataCase_ == ModelMetadataOneofCase.TextExtractionModelMetadata)
    {
        hash ^= TextExtractionModelMetadata.GetHashCode();
    }
    if (modelMetadataCase_ == ModelMetadataOneofCase.TextSentimentModelMetadata)
    {
        hash ^= TextSentimentModelMetadata.GetHashCode();
    }
    if (Name.Length != 0)
    {
        hash ^= Name.GetHashCode();
    }
    if (DisplayName.Length != 0)
    {
        hash ^= DisplayName.GetHashCode();
    }
    if (DatasetId.Length != 0)
    {
        hash ^= DatasetId.GetHashCode();
    }
    if (createTime_ != null)
    {
        hash ^= CreateTime.GetHashCode();
    }
    if (updateTime_ != null)
    {
        hash ^= UpdateTime.GetHashCode();
    }
    if (DeploymentState != 0)
    {
        hash ^= DeploymentState.GetHashCode();
    }
    if (Etag.Length != 0)
    {
        hash ^= Etag.GetHashCode();
    }
    hash ^= Labels.GetHashCode();
    // The oneof discriminator itself participates in the hash.
    hash ^= (int)modelMetadataCase_;
    if (_unknownFields != null)
    {
        hash ^= _unknownFields.GetHashCode();
    }
    return (hash);
}
/// <summary>
/// Verifies that AvailableNotifications returns only the externally usable
/// notification sets advertised by the notification hub.
/// </summary>
public void AvailableNotifications()
{
    var commandHub = new Mock<ISendCommandsToRemoteEndpoints>();
    var datasetId = new DatasetId();
    var endpoint = EndpointIdExtensions.CreateEndpointIdForCurrentProcess();
    var machine = NetworkIdentifier.ForLocalMachine();
    var diagnostics = new SystemDiagnostics((p, s) => { }, null);

    var knownNotifications = new Dictionary<Type, INotificationSet>
        {
            {
                typeof(IMockNotificationSetWithEventHandler),
                new Mock<IMockNotificationSetWithEventHandler>().Object
            },
            {
                typeof(IMockNotificationSetWithTypedEventHandler),
                new Mock<IMockNotificationSetWithTypedEventHandler>().Object
            },
            {
                typeof(IMockNotificationSetForInternalUse),
                new Mock<IMockNotificationSetForInternalUse>().Object
            },
        };

    var datasetNotifications = new Mock<IDatasetApplicationNotifications>();
    var notificationHub = new Mock<INotifyOfRemoteEndpointEvents>();
    {
        notificationHub.Setup(h => h.AvailableNotificationsFor(It.IsAny<EndpointId>()))
            .Returns(knownNotifications.Keys);
        notificationHub.Setup(h => h.NotificationsFor(It.IsAny<EndpointId>(), It.IsAny<Type>()))
            .Returns<EndpointId, Type>((e, t) => knownNotifications[t]);
        notificationHub.Setup(n => n.NotificationsFor<IDatasetApplicationNotifications>(It.IsAny<EndpointId>()))
            .Callback<EndpointId>(e => Assert.AreSame(endpoint, e))
            .Returns(datasetNotifications.Object);
    }

    var info = new DatasetOnlineInformation(
        datasetId,
        endpoint,
        machine,
        commandHub.Object,
        notificationHub.Object,
        diagnostics);

    // Three sets are registered but only two are expected to surface;
    // presumably the internal-use set is filtered out - verify against
    // the AvailableNotifications implementation.
    var resolved = info.AvailableNotifications();
    Assert.AreEqual(2, resolved.Count());
}