// Case: a Reserved query competes with Shared and other queries for data id 2.
// The reserved query wins id 2 despite a lower rating; the shared query with an
// alternative falls back to id 1; the conflicting query gets no match; the two
// ReadOnly queries match freely.
static MatchConflictTestInput CaseFour(Exclusivity conflictingIdExclusivity)
{
    var exclusivities = new[]
    {
        Exclusivity.Reserved,
        Exclusivity.Shared,
        conflictingIdExclusivity,
        Exclusivity.ReadOnly,
        Exclusivity.ReadOnly
    };

    var ratings = new[]
    {
        // because this is the only option for our only reserved query,
        // we let it take precedence over shared data with a better rating
        new Dictionary<int, float> { { 2, 0.9f } },
        // this shared data query has another option - id 1 - so it gets to use that this time.
        new Dictionary<int, float> { { 2, 1f }, { 1, 0.7f } },
        // this shared data query has no other option, and it will not get to match this time.
        new Dictionary<int, float> { { 2, 0.8f } },
        // these two are readonly and should match
        new Dictionary<int, float> { { 2, 0.7f } },
        new Dictionary<int, float> { { 1, 0.8f } }
    };

    var expectedAssignments = new[] { 2, 1, (int)ReservedDataIDs.Invalid, 2, 1 };
    return new MatchConflictTestInput(exclusivities, ratings, expectedAssignments);
}
// if all queries requested different data IDs for their best matches,
// they all get them, regardless of exclusivity rules or priority.
static MatchConflictTestInput CaseTwo(Exclusivity exclusivity, bool mixedPriority = false)
{
    var exclusivities = new[] { exclusivity, exclusivity, exclusivity, exclusivity };

    var ratings = new[]
    {
        new Dictionary<int, float> { { 2, 1f }, { 1, 0.3f } },
        new Dictionary<int, float> { { 1, 0.9f }, { 3, 0.5f } },
        new Dictionary<int, float> { { 4, 0.9f }, { 2, 0.4f } },
        new Dictionary<int, float> { { 3, 0.8f }, { 1, 0.6f } }
    };

    // we want to sometimes check that this test case produces the same results regardless of priority
    MarsEntityPriority[] priorities = null;
    if (mixedPriority)
    {
        priorities = new[]
        {
            MarsEntityPriority.Low, MarsEntityPriority.Normal,
            MarsEntityPriority.Normal, MarsEntityPriority.High
        };
    }

    var expectedAssignments = new[] { 2, 1, 4, 3 };
    return new MatchConflictTestInput(exclusivities, ratings, expectedAssignments, priorities);
}
/// <summary>
/// Use this overload to get arguments for the next instance of a replicated Proxy
/// </summary>
/// <param name="original">The original / previous spawn's arguments</param>
public TryBestMatchArguments(TryBestMatchArguments original)
{
    // all instances of a spawn share the same conditions and exclusivity,
    // and we can re-use the internal ratings structure across them
    ratings = original.ratings;
    exclusivity = original.exclusivity;
    conditions = original.conditions;
}
// Inserts a query's state into the parallel storage arrays at the next free
// slot and returns that slot's index. Increments the count and records the
// index as valid. RelationMemberships starts out null for a new entry.
int Add(QueryMatchID matchId, IMRObject objectRef, Exclusivity exclusivity, int bestMatchDataId,
    bool isRequired, CachedTraitCollection traitCache, ProxyConditions condition,
    ProxyTraitRequirements requirements, ConditionRatingsData rating, HashSet<int> matchSet,
    Dictionary<int, float> flatRatings, QueryResult result)
{
    var index = GetInsertionIndex();

    // identity & matching configuration
    QueryMatchIds[index] = matchId;
    ObjectReferences[index] = objectRef;
    Exclusivities[index] = exclusivity;
    Required[index] = isRequired;
    BestMatchDataIds[index] = bestMatchDataId;

    // condition & trait state
    CachedTraits[index] = traitCache;
    Conditions[index] = condition;
    TraitRequirements[index] = requirements;
    RelationMemberships[index] = null;
    ConditionRatings[index] = rating;
    ReducedConditionRatings[index] = flatRatings;
    ConditionMatchSets[index] = matchSet;
    QueryResults[index] = result;

    m_Count++;
    ValidIndices.Add(index);
    return index;
}
// Builds a base set and marks the first half of its children as non-required,
// collecting those children into nonRequiredChildren. Every child gets the
// given exclusivity applied to its match arguments.
static Relations MakeSetWithNonRequiredChildren(out SetQueryResult result,
    Dictionary<IMRObject, ChildMatchInfo> childrenExpectedMatchInfo,
    List<IMRObject> nonRequiredChildren,
    Exclusivity childrenExclusivity = Exclusivity.ReadOnly)
{
    var relations = MakeBaseSet(out result, childrenExpectedMatchInfo);
    var allChildren = new List<IMRObject>(childrenExpectedMatchInfo.Keys);

    // Half of the children will be non-required.
    var nonRequiredCount = allChildren.Count / 2;
    for (var index = 0; index < allChildren.Count; ++index)
    {
        var child = allChildren[index];
        var args = relations.children[child];
        if (index < nonRequiredCount)
        {
            args.required = false;
            nonRequiredChildren.Add(child);
        }

        args.tryBestMatchArgs.exclusivity = childrenExclusivity;
        relations.children[child] = args;
    }

    return relations;
}
// Classifying a deal that confers an Exclusivity should produce classifications
// keyed by that exclusivity, and among them a data-property range annotated as
// range-validated whose property is "Date" with a DateTime range.
public void Exclusivity()
{
    var deal = new Deal(Guid.NewGuid(), "Test", "Deals.Debt", null, null);
    var exclusivity = new Exclusivity(Guid.NewGuid(), DateTime.Today);
    deal.Confers.Add(exclusivity);
    var classifications = _deals.Classify(deal);
    Assert.That(classifications.ContainsKey(exclusivity));
    classifications[exclusivity].ForEach(TestContext.WriteLine);

    // find the first range-validated data-property range whose domain is one of
    // the exclusivity's class expressions
    var range = (
        from classExpression in classifications[exclusivity]
        join dataPropertyDomain in _deals.Get<IDataPropertyDomain>()
            on classExpression equals dataPropertyDomain.Domain
        join dataPropertyRange in _deals.Get<IDataPropertyRange>()
            on dataPropertyDomain.DataPropertyExpression equals dataPropertyRange.DataPropertyExpression
        from annotation in dataPropertyRange.Annotations
        where annotation.Property == _validation.RangeValidated
        select dataPropertyRange
        ).FirstOrDefault();
    Assert.That(range, Is.Not.Null);
    Assert.That(range.DataPropertyExpression.LocalName, Is.EqualTo("Date"));
    Assert.That(range.Range, Is.EqualTo(ReservedVocabulary.DateTime));
}
// A community containing both actors of an edge that appears in every layer
// has an exclusivity score of zero.
public void ExclusivityEqualsZero()
{
    var first = new Actor(1, "a1");
    var second = new Actor(2, "a2");
    var actors = new List<Actor> { first, second };

    var network = new Network
    {
        Actors = actors,
        Layers = new List<Layer>
        {
            new Layer { Edges = new List<Edge> { new Edge(first, second) } },
            new Layer { Edges = new List<Edge> { new Edge(first, second) } }
        }
    };

    Assert.Equal(0, Exclusivity.Compute(new Community(actors), network));
}
/// <summary>
/// Creates the arguments describing one child member of a set query.
/// </summary>
/// <param name="conditions">The conditions this child must satisfy</param>
/// <param name="exclusivity">How this child reserves the data it matches</param>
/// <param name="required">Whether the set needs this child to match at all</param>
/// <param name="requirements">Optional trait requirements for this child</param>
public SetChildArgs(ProxyConditions conditions, Exclusivity exclusivity, bool required,
    ProxyTraitRequirements requirements = null)
{
    tryBestMatchArgs = new TryBestMatchArguments(conditions, exclusivity);
    TraitRequirements = requirements;
    this.required = required;
}
// a batch version of DataAvailableForUse that only has to lookup used data once for the batch
internal void FilterAvailableData(HashSet<int> dataIDs, int queryID, Exclusivity exclusivity)
{
    k_IDsToRemove.Clear();

    // Data should not be used again if this query is already using it
    HashSet<int> dataUsedByQuery;
    if (DataUsedByQueries.TryGetValue(queryID, out dataUsedByQuery))
        dataIDs.ExceptWithNonAlloc(dataUsedByQuery);

    var readOnly = exclusivity == Exclusivity.ReadOnly;
    var shared = exclusivity == Exclusivity.Shared;
    foreach (var dataID in dataIDs)
    {
        // readonly queries can use anything; otherwise reserved data is off-limits,
        // and data already used as shared is off-limits to reserved queries
        var available = readOnly ||
            !ReservedData.ContainsKey(dataID) && (shared || !SharedDataUsersCounter.ContainsKey(dataID));

        if (!available)
            k_IDsToRemove.Add(dataID);
    }

    dataIDs.ExceptWithNonAlloc(k_IDsToRemove);
}
// Records that a query match is using the given data for updates, tracks the
// data against the parent query, and reserves it per the given exclusivity.
// Logs an error and does nothing if the match is already using some data.
public void MarkDataUsedForUpdates(int dataID, QueryMatchID queryMatchID, Exclusivity exclusivity)
{
    if (DataUsedByQueryMatches.ContainsKey(queryMatchID))
    {
        Debug.LogErrorFormat(
            "Query '{0}' is already using data. If you wish to mark new data as used, " +
            "first call UnmarkDataUsedForUpdates with this query ID.", queryMatchID);
        return;
    }

    DataUsedByQueryMatches[queryMatchID] = dataID;

    var queryID = queryMatchID.queryID;
    HashSet<int> dataUsedByQuery;
    if (DataUsedByQueries.TryGetValue(queryID, out dataUsedByQuery))
    {
        dataUsedByQuery.Add(dataID);
    }
    else
    {
        // first data used by this query - start a pooled set for it
        var newSet = Pools.DataIdHashSets.Get();
        newSet.Add(dataID);
        DataUsedByQueries[queryID] = newSet;
    }

    ReserveDataForQueryMatch(dataID, queryMatchID, exclusivity);
}
// Builds match arguments from the TestMRObject attached to the given GameObject.
internal static TryBestMatchArguments GetMatchArgs(this GameObject go, Exclusivity exclusivity = Exclusivity.ReadOnly)
{
    var conditions = ProxyConditions.FromGenericIMRObject(go.GetComponent<TestMRObject>());
    return new TryBestMatchArguments(conditions, exclusivity);
}
/// <summary>
/// Bundles the options used when declaring a message queue.
/// </summary>
/// <param name="name">The queue name</param>
/// <param name="routingKey">The routing key the queue binds with</param>
/// <param name="isDurable">Whether the queue survives broker restarts</param>
/// <param name="isAutoDelete">Whether the queue is deleted when unused</param>
/// <param name="isExclusive">Whether the queue is exclusive to one connection</param>
public QueueOptions(string name, string routingKey, Durability isDurable, AutoDeletion isAutoDelete, Exclusivity isExclusive)
{
    Name = name;
    RoutingKey = routingKey;
    IsExclusive = isExclusive;
    IsAutoDelete = isAutoDelete;
    IsDurable = isDurable;
}
// Builds minimal query arguments for the Proxy conditions found on the given GameObject.
internal static QueryArgs GetQueryArgs(this GameObject go, Exclusivity exclusivity = Exclusivity.ReadOnly)
{
    var args = new QueryArgs
    {
        commonQueryData = new CommonQueryData(),
        conditions = ProxyConditions.FromGameObject<Proxy>(go),
        exclusivity = exclusivity
    };

    return args;
}
// After marking data as used, updating any trait on that data must dirty the query.
public void QueryDataDirty_DataMarkedAsUsedAndDataChanged_ReturnsTrue(Exclusivity exclusivity)
{
    m_Db.MarkDataUsedForUpdates(k_DataID, m_QueryMatchId, exclusivity);
    m_Db.GetTraitProvider(out MARSTraitDataProvider<float> floatTraits);

    // touch every float trait the conditions reference
    for (var conditionIndex = 0; conditionIndex < k_ConditionsPerType; ++conditionIndex)
    {
        floatTraits.AddOrUpdateTrait(k_DataID, TraitNameForIndex<float>(conditionIndex), default(float));
    }

    Assert.True(m_Db.QueryDataDirty(m_QueryMatchId));
}
// Whether the given data can be matched by the given query under the given
// exclusivity: not already used by the query; readonly can use anything else;
// reserved data blocks everyone else; shared-used data additionally blocks
// reserved queries.
internal bool DataAvailableForUse(int dataID, int queryID, Exclusivity exclusivity)
{
    // Data should not be used again if this query is already using it
    HashSet<int> dataUsedByQuery;
    if (DataUsedByQueries.TryGetValue(queryID, out dataUsedByQuery) && dataUsedByQuery.Contains(dataID))
        return false;

    if (exclusivity == Exclusivity.ReadOnly)
        return true;

    if (ReservedData.ContainsKey(dataID))
        return false;

    return exclusivity == Exclusivity.Shared || !SharedDataUsersCounter.ContainsKey(dataID);
}
// if two queries request one ID, what do we expect for each exclusivity case ?
static MatchConflictTestInput CaseThree(Exclusivity secondDataExclusivity, int expectedSecondId)
{
    var exclusivities = new[] { Exclusivity.Reserved, secondDataExclusivity };

    var ratings = new[]
    {
        new Dictionary<int, float> { { 2, 1f }, { 3, 0.8f } },
        // when readonly, this second query should get id 2. otherwise, id 1.
        new Dictionary<int, float> { { 2, 0.9f }, { 1, 0.7f } }
    };

    return new MatchConflictTestInput(exclusivities, ratings, new[] { 2, expectedSecondId });
}
// Routes the index into the set matching its exclusivity.
// Any unrecognized exclusivity value is silently ignored.
public void Add(int index, Exclusivity exclusivity)
{
    if (exclusivity == Exclusivity.Reserved)
        Reserved.Add(index);
    else if (exclusivity == Exclusivity.Shared)
        Shared.Add(index);
    else if (exclusivity == Exclusivity.ReadOnly)
        ReadOnly.Add(index);
}
// Links an exclusivity record to its application and drug (looked up by
// application/product number) and stores it. If either lookup fails the
// record is silently dropped.
public void AddOrUpdate(Exclusivity exc, string appNo, string productNo)
{
    var key = new ImporterCacheKeyApplication { AppNo = appNo, ProductNo = productNo };

    Guid appId, drugId;
    if (!AppToApp.TryGetValue(key, out appId) || !AppToDrug.TryGetValue(key, out drugId))
        return;

    exc.Id = Guid.NewGuid();
    exc.ApplicationId = appId;
    exc.DrugId = drugId;
    Exclusivities.Add(exc);
}
// Removes from dataIDs every id this member cannot use under its exclusivity,
// using a shared scratch array to avoid allocating per call.
// Returns true while any candidate ids remain after filtering.
static bool FilterMember(Dictionary<int, QueryMatchID> reservedData, Dictionary<int, int> sharedDataUsers,
    HashSet<int> dataIDs, Exclusivity exclusivity)
{
    // grow the scratch buffer if this candidate set is bigger than any seen so far
    if (s_IdsToRemove.Length < dataIDs.Count)
    {
        Array.Resize(ref s_IdsToRemove, dataIDs.Count + MARSMemoryOptions.ResizeHeadroom);
    }

    var removeCounter = 0;
    var isReadOnly = exclusivity == Exclusivity.ReadOnly;
    // read only matches are valid if not already used by the query, so we're done with this one
    if (!isReadOnly)
    {
        var isShared = exclusivity == Exclusivity.Shared;
        foreach (var dataId in dataIDs)
        {
            var idIsPreviouslyReserved = reservedData.ContainsKey(dataId);
            // if it's previously reserved, neither shared nor reserved can use it
            if (idIsPreviouslyReserved)
            {
                s_IdsToRemove[removeCounter] = dataId;
                removeCounter++;
                continue;
            }

            // if this query is reserved & something has previously used it as shared, can't use it
            if (!isShared && sharedDataUsers.ContainsKey(dataId))
            {
                s_IdsToRemove[removeCounter] = dataId;
                removeCounter++;
            }
        }
    }

    // removal is deferred because we cannot modify dataIDs while enumerating it
    for (int i = 0; i < removeCounter; i++)
    {
        dataIDs.Remove(s_IdsToRemove[i]);
    }

    // as long as there are any data ids remaining in the set, then this query passes this stage
    return dataIDs.Count != 0;
}
// Builds fully-populated query arguments for tests; all event handlers are no-ops.
static QueryArgs GetTestQueryArgs(Proxy proxy, Exclusivity exclusivity = Exclusivity.ReadOnly)
{
    var commonData = new CommonQueryData
    {
        timeOut = 20f,
        overrideTimeout = false,
        reacquireOnLoss = true,
        updateMatchInterval = 0.1f
    };

    return new QueryArgs
    {
        commonQueryData = commonData,
        conditions = new ProxyConditions(proxy),
        exclusivity = exclusivity,
        onAcquire = result => { },
        onMatchUpdate = result => { },
        onLoss = result => { },
        onTimeout = queryArgs => { }
    };
}
// Marks the data as changed for this match, then records its reservation:
// shared matches increment a per-data user count; reserved matches claim the
// data outright (an existing reservation is left in place); readonly matches
// record nothing.
internal void ReserveDataForQueryMatch(int dataID, QueryMatchID queryMatchID, Exclusivity exclusivity)
{
    MarkDataAsChanged(dataID, queryMatchID);

    if (exclusivity == Exclusivity.Shared)
    {
        int count;
        SharedDataUsersCounter[dataID] = SharedDataUsersCounter.TryGetValue(dataID, out count) ? count + 1 : 1;
    }
    else if (exclusivity == Exclusivity.Reserved)
    {
        // first reservation wins
        if (!ReservedData.ContainsKey(dataID))
            ReservedData[dataID] = queryMatchID;
    }
}
// Protobuf-style generated hash: XOR-folds the hash of every field that holds
// a non-default value, plus any unknown fields. Do not hand-edit — this
// pattern matches generated code; regenerate from the schema instead.
public override int GetHashCode() { int hash = 1; if (TrainerId != 0L) { hash ^= TrainerId.GetHashCode(); } if (TrainerKind != 0) { hash ^= TrainerKind.GetHashCode(); } if (ScoutMethod != 0) { hash ^= ScoutMethod.GetHashCode(); } if (Exclusivity != 0L) { hash ^= Exclusivity.GetHashCode(); } if (TrainerBaseId != 0) { hash ^= TrainerBaseId.GetHashCode(); } if (Type != 0) { hash ^= Type.GetHashCode(); } if (Rarity != 0) { hash ^= Rarity.GetHashCode(); } if (Role != 0) { hash ^= Role.GetHashCode(); } if (MonsterId != 0UL) { hash ^= MonsterId.GetHashCode(); } if (Move1Id != 0) { hash ^= Move1Id.GetHashCode(); } if (Move2Id != 0) { hash ^= Move2Id.GetHashCode(); } if (Move3Id != 0) { hash ^= Move3Id.GetHashCode(); } if (Move4Id != 0) { hash ^= Move4Id.GetHashCode(); } if (Weakness != 0) { hash ^= Weakness.GetHashCode(); } if (StoryQuest != 0UL) { hash ^= StoryQuest.GetHashCode(); } if (U16 != 0) { hash ^= U16.GetHashCode(); } if (Passive1Id != 0) { hash ^= Passive1Id.GetHashCode(); } if (Passive2Id != 0) { hash ^= Passive2Id.GetHashCode(); } if (Passive3Id != 0) { hash ^= Passive3Id.GetHashCode(); } if (Passive4Id != 0) { hash ^= Passive4Id.GetHashCode(); } if (TeamSkill1Id != 0) { hash ^= TeamSkill1Id.GetHashCode(); } if (TeamSkill2Id != 0) { hash ^= TeamSkill2Id.GetHashCode(); } if (TeamSkill3Id != 0) { hash ^= TeamSkill3Id.GetHashCode(); } if (TeamSkill4Id != 0) { hash ^= TeamSkill4Id.GetHashCode(); } if (TeamSkill5Id != 0) { hash ^= TeamSkill5Id.GetHashCode(); } if (U26 != 0) { hash ^= U26.GetHashCode(); } if (U27 != 0) { hash ^= U27.GetHashCode(); } if (Number != 0) { hash ^= Number.GetHashCode(); } if (ScheduleId.Length != 0) { hash ^= ScheduleId.GetHashCode(); } if (ExScheduleId.Length != 0) { hash ^= ExScheduleId.GetHashCode(); } if (_unknownFields != null) { hash ^= _unknownFields.GetHashCode(); } return(hash); }
/// <summary>
/// Creates a DTO projection carrying only the id and name of an exclusivity.
/// </summary>
/// <param name="e">The exclusivity entity to project</param>
public ExclusivityPersonDTO(Exclusivity e)
{
    ExclusivityName = e.ExclusivityName;
    ExclusivityID = e.ExclusivityID;
}
// Generated-style structural equality: every field must match; reference-typed
// fields are compared via Equals after a null guard, collections via
// SequenceEqual. Keep in sync with the type's field list — this pattern
// matches generated comparer code, so prefer regenerating over hand-editing.
public bool Equals(DestinyVendorItemDefinition input) { if (input == null) { return(false); } return (( VendorItemIndex == input.VendorItemIndex || (VendorItemIndex.Equals(input.VendorItemIndex)) ) && ( ItemHash == input.ItemHash || (ItemHash.Equals(input.ItemHash)) ) && ( Quantity == input.Quantity || (Quantity.Equals(input.Quantity)) ) && ( FailureIndexes == input.FailureIndexes || (FailureIndexes != null && FailureIndexes.SequenceEqual(input.FailureIndexes)) ) && ( Currencies == input.Currencies || (Currencies != null && Currencies.SequenceEqual(input.Currencies)) ) && ( RefundPolicy == input.RefundPolicy || (RefundPolicy != null && RefundPolicy.Equals(input.RefundPolicy)) ) && ( RefundTimeLimit == input.RefundTimeLimit || (RefundTimeLimit.Equals(input.RefundTimeLimit)) ) && ( CreationLevels == input.CreationLevels || (CreationLevels != null && CreationLevels.SequenceEqual(input.CreationLevels)) ) && ( DisplayCategoryIndex == input.DisplayCategoryIndex || (DisplayCategoryIndex.Equals(input.DisplayCategoryIndex)) ) && ( CategoryIndex == input.CategoryIndex || (CategoryIndex.Equals(input.CategoryIndex)) ) && ( OriginalCategoryIndex == input.OriginalCategoryIndex || (OriginalCategoryIndex.Equals(input.OriginalCategoryIndex)) ) && ( MinimumLevel == input.MinimumLevel || (MinimumLevel.Equals(input.MinimumLevel)) ) && ( MaximumLevel == input.MaximumLevel || (MaximumLevel.Equals(input.MaximumLevel)) ) && ( Action == input.Action || (Action != null && Action.Equals(input.Action)) ) && ( DisplayCategory == input.DisplayCategory || (DisplayCategory != null && DisplayCategory.Equals(input.DisplayCategory)) ) && ( InventoryBucketHash == input.InventoryBucketHash || (InventoryBucketHash.Equals(input.InventoryBucketHash)) ) && ( VisibilityScope == input.VisibilityScope || (VisibilityScope != null && VisibilityScope.Equals(input.VisibilityScope)) ) && ( PurchasableScope == input.PurchasableScope || (PurchasableScope != null && PurchasableScope.Equals(input.PurchasableScope)) ) && ( Exclusivity == input.Exclusivity || (Exclusivity != null && Exclusivity.Equals(input.Exclusivity)) ) && ( IsOffer == input.IsOffer || (IsOffer != null && IsOffer.Equals(input.IsOffer)) ) && ( IsCrm == input.IsCrm || (IsCrm != null && IsCrm.Equals(input.IsCrm)) ) && ( SortValue == input.SortValue || (SortValue.Equals(input.SortValue)) ) && ( ExpirationTooltip == input.ExpirationTooltip || (ExpirationTooltip != null && ExpirationTooltip.Equals(input.ExpirationTooltip)) ) && ( RedirectToSaleIndexes == input.RedirectToSaleIndexes || (RedirectToSaleIndexes != null && RedirectToSaleIndexes.SequenceEqual(input.RedirectToSaleIndexes)) ) && ( SocketOverrides == input.SocketOverrides || (SocketOverrides != null && SocketOverrides.SequenceEqual(input.SocketOverrides)) ) && ( Unpurchasable == input.Unpurchasable || (Unpurchasable != null && Unpurchasable.Equals(input.Unpurchasable)) )); }
// Removes from dataIDs every id this query match cannot use: ids the query is
// already using, and (for non-readonly queries) ids reserved by other matches
// or, for reserved queries, ids already in shared use. Uses a shared scratch
// array to avoid per-call allocation; in the editor, removed ids are recorded
// for debugging.
void Filter(Dictionary<int, HashSet<int>> dataUsedByQueries, Dictionary<int, QueryMatchID> reservedData,
    Dictionary<int, int> sharedDataUsers, HashSet<int> dataIDs, QueryMatchID matchId, Exclusivity exclusivity)
{
    // grow the scratch buffer if this candidate set is bigger than any seen so far
    if (s_IDsToRemove.Length < dataIDs.Count)
    {
        Array.Resize(ref s_IDsToRemove, dataIDs.Count + MARSMemoryOptions.ResizeHeadroom);
    }

    var removeCounter = 0;
    var queryId = matchId.queryID;
    // Data should not be used again if this query is already using it
    if (dataUsedByQueries.TryGetValue(queryId, out var dataUsedByQuery))
    {
        dataIDs.ExceptWithNonAlloc(dataUsedByQuery);
    }

    var isReadOnly = exclusivity == Exclusivity.ReadOnly;
    // read only matches are valid if not already used by the query, so we're done with this one
    if (!isReadOnly)
    {
        var isShared = exclusivity == Exclusivity.Shared;
        foreach (var dataID in dataIDs)
        {
            var idIsPreviouslyReserved = reservedData.ContainsKey(dataID);
            // if it's previously reserved, neither shared nor reserved can use it
            if (idIsPreviouslyReserved)
            {
                s_IDsToRemove[removeCounter] = dataID;
                removeCounter++;
                continue;
            }

            // if this query is reserved & something has previously used it as shared, can't use it
            if (!isShared && sharedDataUsers.ContainsKey(dataID))
            {
                s_IDsToRemove[removeCounter] = dataID;
                removeCounter++;
            }
        }
    }

#if UNITY_EDITOR
    if (removeCounter > 0)
    {
        // collect information about what, if anything, got filtered, for editor debug purposes
        var filteredList = Pools.IntLists.Get();
        for (var j = 0; j < removeCounter; j++)
        {
            filteredList.Add(s_IDsToRemove[j]);
        }

        DebugFilteredResults.Add(matchId, filteredList);
    }
#endif

    // removal is deferred because we cannot modify dataIDs while enumerating it
    for (int i = 0; i < removeCounter; i++)
    {
        dataIDs.Remove(s_IDsToRemove[i]);
    }
}
// NOTE(review): the test name says "ReturnsFalse" but the assertion expects true.
// MarkDataUsedForUpdates itself marks the data as changed (it calls
// ReserveDataForQueryMatch, which calls MarkDataAsChanged), so the query is
// dirty immediately after marking — if that is the intended behavior, the test
// name is misleading; otherwise the assertion is wrong. Confirm intent.
public void QueryDataDirty_DataMarkedAsUsedAndNoDataChanged_ReturnsFalse(Exclusivity exclusivity)
{
    m_Db.MarkDataUsedForUpdates(m_DataID, m_QueryMatchId, exclusivity);
    Assert.True(m_Db.QueryDataDirty(m_QueryMatchId));
}
// Imports the FDA Orange Book data set from pre-downloaded "~"-delimited text
// files (products.txt, exclusivity.txt, patent.txt) under baseDir/ShortName
// into the ImporterCache. Always returns 0.
// NOTE(review): the method is declared async but contains no awaits because
// the download code below was commented out — confirm whether it should be
// reinstated or the signature simplified.
public static async Task<int> Import(string baseDir, SiteConfig config)
{
    string workDir = Path.Combine(baseDir, config.ShortName);
    Directory.CreateDirectory(workDir);

    //using (HttpClient httpClient = new HttpClient())
    //{
    //    HttpResponseMessage msg = await httpClient.GetAsync(new Uri(config.DownloadFiles[0]));
    //    using (StreamWriter sw = new StreamWriter(Path.Combine(workDir, "orange.zip")))
    //    {
    //        await (msg.Content.CopyToAsync(sw.BaseStream));
    //        sw.Flush()
    //    }
    //}
    //FastZip fz = new FastZip();
    //fz.ExtractZip(Path.Combine(workDir, "orange.zip"), workDir, null);

    // products.txt: one row per drug product / application pairing
    using (StreamReader sr = new StreamReader(Path.Combine(workDir, "products.txt")))
    {
        var csv = new CsvReader(sr);
        csv.Configuration.RegisterClassMap<OrangeBook_ProductMap>();
        csv.Configuration.Delimiter = "~";
        var records = csv.GetRecords<OrangeBook_Product>();
        foreach (var rec in records)
        {
            // DfRoute holds "dosage form;route of administration"
            string[] dfRoute = rec.DfRoute.Split(';');
            int? df = EnumCache.Instance.GetNullableValue(typeof(DosageForm), dfRoute[0]);
            int? route = EnumCache.Instance.GetNullableValue(typeof(RouteOfAdministration), dfRoute[1]);
            var drug = new Drug
            {
                Ingredient = rec.Ingredient,
                TradeNames = new List<string> { rec.TradeName },
                DosageForm = (DosageForm?)df,
                DosageFormText = dfRoute[0],
                RouteOfAdmin = (RouteOfAdministration?)route,
                RouteOfAdminText = dfRoute[1],
                DrugType = rec.Type,
                Strengths = new List<string> { rec.Strength }
            };
            var application = new Application
            {
                ApplicationType = rec.ApplicationType,
                Applicant = rec.Applicant,
                ApplicantFullName = rec.ApplicantFullName,
                ApplicationNumber = rec.ApplicationNumber,
                ApprovalDate = rec.ApprovalDate,
                ProductNumber = rec.ProductNumber,
                ReferenceListedDrug = rec.Rld,
                ReferenceStandard = rec.Rs,
                TeCode = rec.TeCode
            };
            ImporterCache.Instance.AddOrUpdate(drug, application);
        }
    }

    // exclusivity.txt: marketing exclusivity codes with expiration dates
    using (StreamReader sr = new StreamReader(Path.Combine(workDir, "exclusivity.txt")))
    {
        var csv = new CsvReader(sr);
        csv.Configuration.RegisterClassMap<OrangeBook_ExclusivityMap>();
        csv.Configuration.Delimiter = "~";
        var records = csv.GetRecords<OrangeBook_Exclusivity>();
        foreach (var rec in records)
        {
            var exclusivity = new Exclusivity
            {
                ExclusivityCode = rec.ExclusivityCode,
                ExpirationDate = rec.ExclusivityDate
            };
            ImporterCache.Instance.AddOrUpdate(exclusivity, rec.ApplicationNumber, rec.ProductNumber);
        }
    }

    // patent.txt: patent numbers with expiration dates
    using (StreamReader sr = new StreamReader(Path.Combine(workDir, "patent.txt")))
    {
        var csv = new CsvReader(sr);
        csv.Configuration.RegisterClassMap<OrangeBook_PatentMap>();
        csv.Configuration.Delimiter = "~";
        var records = csv.GetRecords<OrangeBook_Patent>();
        foreach (var rec in records)
        {
            var patent = new Patent
            {
                PatentNumber = rec.PatentNumber,
                ExpirationDate = rec.PatentExpireDate
            };
            ImporterCache.Instance.AddOrUpdate(patent, rec.ApplicationNumber, rec.ProductNumber);
        }
    }

    return 0;
}
// Convenience wrapper pairing a condition set with its exclusivity.
static TryBestMatchArguments MatchArgsForConditions(ProxyConditions conditions, Exclusivity exclusivity)
    => new TryBestMatchArguments(conditions, exclusivity);
/// <summary>
/// Creates match arguments for the given conditions, allocating a fresh
/// ratings structure sized for them.
/// </summary>
/// <param name="conditions">The conditions to match against</param>
/// <param name="exclusivity">How matched data should be reserved</param>
public TryBestMatchArguments(ProxyConditions conditions, Exclusivity exclusivity)
{
    ratings = new ConditionRatingsData(conditions);
    this.exclusivity = exclusivity;
    this.conditions = conditions;
}
/// <summary>
/// Tells the database that data will be used to update a query match
/// </summary>
/// <param name="obj">The functionality user making the call</param>
/// <param name="dataID">ID for the data used by a query match</param>
/// <param name="queryMatchID">ID of the query match using this data</param>
/// <param name="exclusivity">Specification of how the data should be reserved</param>
public static void MarkDataUsedForUpdates(this IUsesDatabaseQuerying obj, int dataID, QueryMatchID queryMatchID, Exclusivity exclusivity)
{
    // thin extension wrapper; delegates to the shared static implementation
    IUsesDatabaseQueryingMethods.MarkDataUsedForUpdates(dataID, queryMatchID, exclusivity);
}