/// <summary>Creates an equality relation over two terms and registers both as operands.</summary>
public TermEquality(Term term1, Term term2)
{
    terms.Add(term1);
    terms.Add(term2);
    this.term1 = term1;
    this.term2 = term2;
}
/// <summary>
/// Evaluates the argument expression and returns it as a plain string.
/// </summary>
/// <param name="arg">Argument terms to evaluate.</param>
/// <param name="context">Evaluation context (math store).</param>
/// <returns>The text value with surrounding quote characters removed.</returns>
/// <exception cref="SMath.Manager.EvaluationException">Thrown when the argument does not evaluate to a text value.</exception>
public static string GetStringParam(Term[] arg, ref SMath.Math.Store context)
{
    var dbl = GetNumberParam(arg, ref context).obj as SMath.Math.Numeric.TDouble;
    // BUG FIX: the 'as' cast yields null when the evaluated object is not a TDouble,
    // and the original dereferenced it unconditionally (NullReferenceException).
    // Treat a non-TDouble result the same as a non-text value.
    if (dbl == null || !dbl.isText)
        throw new SMath.Manager.EvaluationException(Errors.ArgumentMustBeString);
    // Text values are serialized wrapped in double quotes; strip them.
    return dbl.ToString().Trim('"');
}
/// <summary>
/// Returns the children of <paramref name="parentTerm"/> ordered by the term's
/// CustomSortOrder (a ':'-separated list of term GUIDs); children not listed in
/// the sort order are appended at the end. Without a custom order the natural
/// child collection is returned.
/// </summary>
private static IEnumerable<Term> SortChildTerms(Term parentTerm)
{
    // No custom order configured: natural order wins.
    if (string.IsNullOrEmpty(parentTerm.CustomSortOrder))
        return parentTerm.Terms.ToList();

    var children = parentTerm.Terms.ToList();

    // Parse the ':'-separated GUID list that defines the desired order.
    var orderedIds = parentTerm.CustomSortOrder
        .Split(':')
        .Select(id => new Guid(id))
        .ToList();

    var ordered = new List<Term>();
    foreach (var id in orderedIds)
    {
        var match = children.SingleOrDefault(term => term.Id.Equals(id));
        if (match != null)
            ordered.Add(match);
    }

    // Add unsorted terms at the end of the collection.
    ordered.AddRange(children.Where(term => !orderedIds.Contains(term.Id)));
    return ordered;
}
/// <summary>
/// Reads the next term delta from <paramref name="input"/>: a shared-prefix
/// length, a new-suffix length, the suffix data, and finally the field number.
/// Updates the internal <c>text</c>/<c>bytes</c> buffers in place.
/// </summary>
/// <param name="input">Index input positioned at a term record.</param>
/// <param name="fieldInfos">Used to resolve the field number to a field name.</param>
public void Read(IndexInput input, FieldInfos fieldInfos)
{
    this.term = null; // invalidate cache
    int start = input.ReadVInt();    // units shared with the previous term
    int length = input.ReadVInt();   // new units to read
    int totalLength = start + length;
    if (preUTF8Strings)
    {
        // Legacy (pre-UTF8) format stores terms as raw chars.
        text.SetLength(totalLength);
        input.ReadChars(text.result, start, length);
    }
    else
    {
        if (dirty)
        {
            // Fully convert all bytes since bytes is dirty
            UnicodeUtil.UTF16toUTF8(text.result, 0, text.length, bytes);
            bytes.SetLength(totalLength);
            input.ReadBytes(bytes.result, start, length);
            UnicodeUtil.UTF8toUTF16(bytes.result, 0, totalLength, text);
            dirty = false;
        }
        else
        {
            // Incrementally convert only the UTF8 bytes that are new:
            bytes.SetLength(totalLength);
            input.ReadBytes(bytes.result, start, length);
            UnicodeUtil.UTF8toUTF16(bytes.result, start, length, text);
        }
    }
    this.field = fieldInfos.FieldName(input.ReadVInt());
}
/// <summary>Builds a binary search term for a field, taking ownership of the left operand.</summary>
private BinaryTerm(RiakFluentSearch search, string field, Op op, Term left)
    : base(search, field)
{
    _left = left;
    _op = op;
    left.Owner = this;
}
/// <summary>
/// Renders a resolved term as a plain string: a struct's functor name (or the
/// full term text for the "." functor), an integer or numeric text for
/// numbers, and the empty string for anything else.
/// </summary>
public static String stringValueFromTerm(Term t)
{
    Term resolved = t.getTerm();

    if (resolved is Struct)
    {
        string name = ((Struct)resolved).getName();
        // For the "." functor render the whole term instead of just the name.
        return name.Equals(".") ? resolved.ToString() : name;
    }

    if (resolved is Number)
    {
        Number number = (Number)resolved;
        return number is Int ? number.intValue().ToString() : number.ToString();
    }

    return string.Empty;
}
/// <summary>Stores the predicate's three arguments and the continuation to invoke next.</summary>
public Check_Next_Free_3(Term a1, Term a2, Term a3, Predicate cont)
{
    this.cont = cont;
    arg1 = a1;
    arg2 = a2;
    arg3 = a3;
}
/// <summary>Creates a seat arrangement for an agent covering the given time span.</summary>
public SeatArrangement(Term source, ISimpleEmployee agent, DateTime start, DateTime end)
{
    Source = source;
    Agent = agent;
    Start = start;
    End = end;
}
/// <summary>Rebinds the predicate's three arguments from <paramref name="args"/> and sets the continuation.</summary>
public override void setArgument(Term[] args, Predicate cont)
{
    this.cont = cont;
    arg1 = args[0];
    arg2 = args[1];
    arg3 = args[2];
}
/// <summary>
/// Recursively flattens an application tree <paramref name="t"/> into a linear
/// sequence of <c>PlanItem</c>s appended to <paramref name="plan"/>. Names emit
/// directly; applications onto a non-empty plan are encoded via S/K/I
/// combinator wrapping so the existing plan composes with the new application.
/// </summary>
public static PlanItem[] MakePlan(PlanItem[] plan, Term t)
{
    // A bare name emits its own plan items directly.
    if (t is Name)
        return plan.Concat(Term(t)).ToArray();
    var app = (App)t;
    if (plan.Length == 0)
    {
        if (app.left is App)
        {
            // Left side is itself an application: plan it first, then the argument.
            return MakePlan(MakePlan(plan, app.left), app.right);
        }
        else
        {
            // Left side is a name: plan the argument, then apply the named card to the left.
            var name = (Name)app.left;
            return MakePlan(plan, app.right).Concat(new[] { new PlanItem { Card = name.name, ToLeft = true } }).ToArray();
        }
    }
    // Non-empty plan: wrap with SK ... SK ... I so the accumulated plan
    // composes with this application (combinator encoding).
    return MakePlan(MakePlan(plan.Concat(SK()).ToArray(), app.left).Concat(SK()).Concat(Term("I")).ToArray(), app.right);
}
/// <summary>
/// Create a new AutomatonQuery from an <seealso cref="Automaton"/>.
/// </summary>
/// <param name="term">Term containing field and possibly some pattern structure. The
/// term text is ignored.</param>
/// <param name="automaton">Automaton to run, terms that are accepted are considered a
/// match.</param>
public AutomatonQuery(Term term, Automaton automaton)
    : base(term.Field)
{
    this.Term = term;
    this.Automaton_Renamed = automaton;
    // Compile once up front; the compiled form is what runs against the terms.
    this.Compiled = new CompiledAutomaton(automaton);
}
/// <summary>Two terms constructed from the same text must compare equal.</summary>
public void EqualsTestEquality()
{
    var left = new Term("foo");
    var right = new Term("foo");
    Assert.IsTrue(left.Equals(right));
}
/// <summary>
/// Reads the next term record from <paramref name="input"/> as a shared-prefix
/// delta (suffix start, suffix length, suffix bytes) followed by the field
/// number, updating the byte buffer and current field in place.
/// </summary>
/// <param name="input">Index input positioned at a term record.</param>
/// <param name="fieldInfos">Used to resolve field numbers to field names.</param>
public void Read(IndexInput input, FieldInfos fieldInfos)
{
    this.Term = null; // invalidate cache
    NewSuffixStart = input.ReadVInt();
    int length = input.ReadVInt();
    int totalLength = NewSuffixStart + length;
    Debug.Assert(totalLength <= ByteBlockPool.BYTE_BLOCK_SIZE - 2, "termLength=" + totalLength + ",resource=" + input);
    if (Bytes.Bytes.Length < totalLength)
    {
        Bytes.Grow(totalLength);
    }
    Bytes.Length = totalLength;
    input.ReadBytes(Bytes.Bytes, NewSuffixStart, length);
    int fieldNumber = input.ReadVInt();
    if (fieldNumber != CurrentFieldNumber)
    {
        CurrentFieldNumber = fieldNumber;
        // NOTE: too much sneakiness here, seriously this is a negative vint?!
        if (CurrentFieldNumber == -1)
        {
            Field = "";
        }
        else
        {
            Debug.Assert(fieldInfos.FieldInfo(CurrentFieldNumber) != null, CurrentFieldNumber.ToString());
            Field = String.Intern(fieldInfos.FieldInfo(CurrentFieldNumber).Name);
        }
    }
    else
    {
        // BUG FIX: '+' binds tighter than '==', so the original message expression
        // parsed as ("..." + FieldInfo(...)) == null — always false — and then
        // dereferenced FieldInfo(...).Name unconditionally, which could throw
        // NullReferenceException while building the assert message. Parenthesize
        // the null test so "null" is actually used when the field info is missing.
        Debug.Assert(Field.Equals(fieldInfos.FieldInfo(fieldNumber).Name),
            "currentFieldNumber=" + CurrentFieldNumber + " field=" + Field + " vs " +
            (fieldInfos.FieldInfo(fieldNumber) == null ? "null" : fieldInfos.FieldInfo(fieldNumber).Name));
    }
}
/// <summary>Creates a rule from an antecedent and a consequence; neither may be null.</summary>
public Rule(Term.Term antecent, Term.Term consequence)
{
    if (antecent == null)
    {
        throw new ArgumentNullException("antecent");
    }
    if (consequence == null)
    {
        throw new ArgumentNullException("consequence");
    }

    _antecent = antecent;
    _consequence = consequence;
}
/// <summary>
/// Doubly links two terms: <paramref name="from"/> points forward to
/// <paramref name="to"/> and <paramref name="to"/> points back. A null on
/// either side makes this a no-op.
/// </summary>
public static void TermLink(Term from, Term to)
{
    if (from != null && to != null)
    {
        from.To = to;
        to.From = from;
    }
}
/// <summary>
/// Imports a term (and, by default, its descendants) from an XML element into
/// <paramref name="parentTerm"/>, reusing an existing child term of the same
/// name when one already exists.
/// </summary>
/// <param name="parentTerm">Parent term to import under.</param>
/// <param name="termElement">Element whose Name attribute holds the term name; child elements are child terms.</param>
/// <param name="recurse">Whether to recursively import child elements.</param>
public void ImportTerm(Term parentTerm, XElement termElement, bool recurse = true)
{
    try
    {
        //Get the LCID.
        //TODO: Support importing LCIDs
        int lcid = CultureInfo.CurrentCulture.LCID;

        //Get the term name
        string termName = (string)termElement.Attribute("Name");

        //Check if the term exist
        Term term = parentTerm.GetTerms(termName, lcid, true, StringMatchOption.ExactMatch, 1, false).FirstOrDefault();

        //If the term does not exist, create it
        if (term == null)
        {
            term = parentTerm.CreateTerm(termName, lcid);
        }

        //Create the child terms
        if (recurse)
        {
            foreach (XElement childElement in termElement.Elements())
            {
                ImportTerm(term, childElement, recurse);
            }
        }
    }
    catch (Exception)
    {
        // NOTE(review): intentionally best-effort — a failure importing one term is
        // swallowed so sibling imports can proceed (removed the unused exception
        // variable). Consider logging here; silently discarding the exception hides
        // taxonomy permission/validation errors from the caller.
    }
}
/// <summary>Creates spans over the position postings of a single term.</summary>
public TermSpans(TermPositions positions, Term term)
{
    this.term = term;
    this.internalPositions = positions;
    internalDoc = -1; // no document advanced to yet
}
/// <summary>
/// Define a function.
/// </summary>
/// <param name="body">The function body.</param>
/// <returns>A function.</returns>
public static Identity Identity(Func<Term, Term, Identity> body)
{
    // Removed a dead `body.Method.GetParameters()` call: unlike the Function
    // overload, this one never forwarded the reflected parameters anywhere.
    var x0 = new Term(0);
    var x1 = new Term(1);
    return body(x0, x1);
}
/// <summary>
/// Define a function.
/// </summary>
/// <param name="body">The function body.</param>
/// <returns>A function.</returns>
public static Function<Func<double, double, double>> Function(Func<Term, Term, Function<Func<double, double, double>>> body)
{
    // The reflected parameter infos supply the function's argument names via With().
    var parameters = body.Method.GetParameters();
    var first = new Term(0);
    var second = new Term(1);
    return body(first, second).With(parameters);
}
/// <summary>Builds a vector by applying <paramref name="elemOp"/> element-wise to two equal-length term arrays.</summary>
private TVec(Term[] left, Term[] right, Func<Term, Term, Term> elemOp)
{
    Contract.Assume(left.Length == right.Length);
    int count = left.Length;
    terms = new Term[count];
    for (int i = 0; i < count; i++)
    {
        terms[i] = elemOp(left[i], right[i]);
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AbstractBaseMutexEvaluator"/> class.
/// </summary>
/// <param name="conditional">The conditional.</param>
/// <param name="predicate">The predicate.</param>
/// <param name="subject">The subject.</param>
public AbstractBaseMutexEvaluator(Term conditional, Term predicate, Term subject)
{
    _sorter = new Dictionary<object, List<WME>>();
    _conditionalTerm = conditional;
    _predicate = predicate;
    _subjectTerm = subject;
}
/// <summary>
/// Walks the term chain looking for the first term an optimizer can anchor on,
/// returning an <see cref="Optimizer"/> at that term, or null when no usable
/// anchor exists. <paramref name="dist"/> tracks how far (in consumed
/// positions) the anchor lies from the match start.
/// </summary>
private static Optimizer Find(Term term, int dist)
{
    if (term == null)
        return null;
    Term next = term.next;
    Term.TermType type = term.type;
    switch (type)
    {
        // Concrete characters / regex character terms anchor directly.
        case Term.TermType.CHAR:
        case Term.TermType.REG:
        case Term.TermType.REG_I:
            return new Optimizer(term, dist);

        // Character classes anchor only when selective enough (low weight);
        // otherwise skip past them, consuming one position.
        case Term.TermType.BITSET:
        case Term.TermType.BITSET2:
            if (term.weight <= THRESHOLD)
                return new Optimizer(term, dist);
            else
                return Find(term.next, dist + 1);

        // Wildcards consume one position but cannot anchor.
        case Term.TermType.ANY_CHAR:
        case Term.TermType.ANY_CHAR_NE:
            return Find(next, dist + 1);

        // A repeat with a mandatory minimum can anchor on its repeated target.
        case Term.TermType.REPEAT_MIN_INF:
        case Term.TermType.REPEAT_MIN_MAX:
            if (term.minCount > 0)
            {
                return Find(term.target, dist);
            }
            else
                return null;

        // Zero-width constructs: look through without advancing the distance.
        case Term.TermType.BOUNDARY:
        case Term.TermType.DIRECTION:
        case Term.TermType.UBOUNDARY:
        case Term.TermType.UDIRECTION:
        case Term.TermType.GROUP_IN:
        case Term.TermType.GROUP_OUT:
        case Term.TermType.VOID:
        case Term.TermType.START:
        case Term.TermType.END:
        case Term.TermType.END_EOL:
        case Term.TermType.LINE_START:
        case Term.TermType.LINE_END:
        case Term.TermType.LAST_MATCH_END:
        case Term.TermType.CNT_SET_0:
        case Term.TermType.CNT_INC:
        case Term.TermType.CNT_GT_EQ:
        case Term.TermType.READ_CNT_LT:
        case Term.TermType.CRSTORE_CRINC:
        case Term.TermType.CR_SET_0:
        case Term.TermType.CR_LT:
        case Term.TermType.CR_GT_EQ:
            return Find(next, dist);
    }
    // Unknown/unhandled term types cannot be optimized.
    return null;
}
/// <summary>Captures one segment's enumerator state (base doc offset, enum, reader) for merging.</summary>
internal SegmentMergeInfo(int b, TermEnum te, IndexReader r)
{
    termEnum = te;
    reader = r;
    base_Renamed = b;
    term = te.Term; // cache the enumerator's current term
}
/// <summary>Stores the predicate's three arguments and the continuation to run next.</summary>
public Line_3(Term a1, Term a2, Term a3, Predicate cont)
{
    this.cont = cont;
    arg1 = a1;
    arg2 = a2;
    arg3 = a3;
}
/// <summary>Stores the predicate's three arguments and the continuation to run next.</summary>
public Named_Square_3(Term a1, Term a2, Term a3, Predicate cont)
{
    this.cont = cont;
    arg1 = a1;
    arg2 = a2;
    arg3 = a3;
}
/// <summary>
/// Populates <paramref name="dir"/> with nDocs random documents over nTerms
/// single-letter terms in field "f". Term i is included in a document with
/// probability 1/ceil(((nTerms+1)-i)^power), so earlier terms are rarer; the
/// index is then force-merged to a single segment.
/// </summary>
public virtual void CreateRandomTerms(int nDocs, int nTerms, double power, Directory dir)
{
    int[] freq = new int[nTerms];
    Terms = new Term[nTerms];
    for (int i = 0; i < nTerms; i++)
    {
        int f = (nTerms + 1) - i; // make first terms less frequent
        freq[i] = (int)Math.Ceiling(Math.Pow(f, power));
        Terms[i] = new Term("f", char.ToString((char)('A' + i)));
    }

    IndexWriter iw = new IndexWriter(dir, NewIndexWriterConfig(TEST_VERSION_CURRENT, new MockAnalyzer(Random())).SetOpenMode(OpenMode.CREATE));
    for (int doc = 0; doc < nDocs; doc++)
    {
        Document d = new Document();
        for (int j = 0; j < nTerms; j++)
        {
            // Include term j with probability 1/freq[j].
            if (Random().Next(freq[j]) == 0)
            {
                d.Add(NewStringField("f", Terms[j].Text(), Field.Store.NO));
            }
        }
        iw.AddDocument(d);
    }
    iw.ForceMerge(1);
    iw.Dispose();
}
/// <summary>
/// Merges two adjacent terms into a brand-new term (translated from the
/// original Chinese comment: "merge two terms into one completely new term").
/// The new term's name is the concatenation of both names and its offset is
/// taken from <paramref name="from"/>.
/// </summary>
/// <param name="from">Leading term of the pair.</param>
/// <param name="to">Trailing term of the pair.</param>
/// <param name="termNatures">Natures assigned to the merged term.</param>
/// <returns>The merged term.</returns>
public static Term MakeNewTermNum(Term from, Term to, TermNatures termNatures)
{
    var term = new Term(from.Name + to.Name, from.Offe, termNatures);
    // Carry over the numeric attribute of the leading term.
    term.TermNatures.NumAttr = @from.TermNatures.NumAttr;
    TermLink(term, to.To);
    // NOTE(review): term.From has not been assigned at this point, so if it is
    // null this call is a no-op — presumably the intent was to link the merged
    // term back into the chain (e.g. from.From -> term); confirm against the
    // upstream implementation.
    TermLink(term.From, term);
    return term;
}
/// <summary>Stores the predicate's four arguments and the continuation to run next.</summary>
public Free_Close_Line_4(Term a1, Term a2, Term a3, Term a4, Predicate cont)
{
    this.cont = cont;
    arg1 = a1;
    arg2 = a2;
    arg3 = a3;
    arg4 = a4;
}
/// <summary>Node combining two sub-terms under an evaluator; wires both children's parent links to this node.</summary>
internal BinaryFunc(Term left, Term right, Func<Vector, double> evaluator)
    : base(evaluator)
{
    Left = left;
    Left.Parent = this;
    Right = right;
    Right.Parent = this;
}
/// <summary>Stores the predicate's four arguments and the continuation to run next.</summary>
public dollar_dummyLogic__0_4(Term a1, Term a2, Term a3, Term a4, Predicate cont)
{
    this.cont = cont;
    arg1 = a1;
    arg2 = a2;
    arg3 = a3;
    arg4 = a4;
}
/// <summary>
/// Creates an enumerator over terms within <paramref name="minSimilarity"/> of
/// <paramref name="term"/>, delegating entirely to the base fuzzy enumerator.
/// NOTE(review): the trailing <c>false</c> presumably disables the
/// transpositions option of the base constructor — confirm against the base
/// class signature.
/// </summary>
public SlowFuzzyTermsEnum(Terms terms, AttributeSource atts, Term term, float minSimilarity, int prefixLength)
    : base(terms, atts, term, minSimilarity, prefixLength, false)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="RetractCondition"/> class.
/// </summary>
/// <param name="id">The id.</param>
/// <param name="attribute">The attribute.</param>
/// <param name="value">The value.</param>
/// <remarks>Delegates to the base condition with a fixed "RetractCondition" label and <c>ConditionType.Retract</c>.</remarks>
public RetractCondition(Term id, Term attribute, Term value)
    : base("RetractCondition", ConditionType.Retract, id, attribute, value)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="RetractCondition"/> class.
/// </summary>
/// <param name="label">The label.</param>
/// <param name="id">The id.</param>
/// <param name="attribute">The attribute.</param>
/// <param name="value">The value.</param>
/// <remarks>Delegates to the base condition with the caller-supplied label and <c>ConditionType.Retract</c>.</remarks>
public RetractCondition(string label, Term id, Term attribute, Term value)
    : base(label, ConditionType.Retract, id, attribute, value)
{
}
/// <summary>
/// Calls <see cref="SlowFuzzyQuery(Term, float)">SlowFuzzyQuery(term, minimumSimilarity, 0, defaultMaxExpansions)</see>.
/// </summary>
/// <remarks>
/// NOTE(review): the summary advertises a prefix length of 0 while the code
/// forwards <c>defaultPrefixLength</c>; these agree only if that constant is 0 — confirm.
/// </remarks>
public SlowFuzzyQuery(Term term, float minimumSimilarity)
    : this(term, minimumSimilarity, defaultPrefixLength, defaultMaxExpansions)
{
}
/// <summary>
/// A filter restricting results to terms starting with <paramref name="prefix"/>,
/// implemented by wrapping a <c>PrefixQuery</c> in the base query-wrapper filter.
/// </summary>
public PrefixFilter(Term prefix)
    : base(new PrefixQuery(prefix))
{
}
/// <summary>
/// Expert: highlights the top-N passages from multiple fields,
/// for the provided int[] docids, to custom object as
/// returned by the <see cref="PassageFormatter"/>. Use
/// this API to render to something other than <see cref="string"/>.
/// </summary>
/// <param name="fieldsIn">field names to highlight. Must have a stored string value and also be indexed with offsets.</param>
/// <param name="query">query to highlight.</param>
/// <param name="searcher">searcher that was previously used to execute the query.</param>
/// <param name="docidsIn">containing the document IDs to highlight.</param>
/// <param name="maxPassagesIn">The maximum number of top-N ranked passages per-field used to form the highlighted snippets.</param>
/// <returns>
/// <see cref="T:IDictionary{string, object[]}"/> keyed on field name, containing the array of formatted snippets
/// corresponding to the documents in <paramref name="docidsIn"/>.
/// If no highlights were found for a document, the
/// first <paramref name="maxPassagesIn"/> from the field will
/// be returned.
/// </returns>
/// <exception cref="IOException">if an I/O error occurred during processing</exception>
/// <exception cref="ArgumentException">if <c>field</c> was indexed without <see cref="IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS"/></exception>
protected internal virtual IDictionary<string, object[]> HighlightFieldsAsObjects(string[] fieldsIn, Query query, IndexSearcher searcher, int[] docidsIn, int[] maxPassagesIn)
{
    if (fieldsIn.Length < 1)
    {
        throw new ArgumentException("fieldsIn must not be empty");
    }
    if (fieldsIn.Length != maxPassagesIn.Length)
    {
        throw new ArgumentException("invalid number of maxPassagesIn");
    }
    IndexReader reader = searcher.IndexReader;
    // Rewrite so multi-term queries expand into concrete terms we can extract.
    Query rewritten = Rewrite(query);
    JCG.SortedSet<Term> queryTerms = new JCG.SortedSet<Term>();
    rewritten.ExtractTerms(queryTerms);
    IndexReaderContext readerContext = reader.Context;
    IList<AtomicReaderContext> leaves = readerContext.Leaves;

    // Make our own copies because we sort in-place:
    int[] docids = new int[docidsIn.Length];
    System.Array.Copy(docidsIn, 0, docids, 0, docidsIn.Length);
    string[] fields = new string[fieldsIn.Length];
    System.Array.Copy(fieldsIn, 0, fields, 0, fieldsIn.Length);
    int[] maxPassages = new int[maxPassagesIn.Length];
    System.Array.Copy(maxPassagesIn, 0, maxPassages, 0, maxPassagesIn.Length);

    // sort for sequential io
    ArrayUtil.TimSort(docids);
    // Sorts fields and maxPassages in lockstep so they stay paired.
    new InPlaceMergeSorterAnonymousHelper(fields, maxPassages).Sort(0, fields.Length);

    // pull stored data:
    IList<string[]> contents = LoadFieldValues(searcher, fields, docids, maxLength);

    IDictionary<string, object[]> highlights = new Dictionary<string, object[]>();
    for (int i = 0; i < fields.Length; i++)
    {
        string field = fields[i];
        int numPassages = maxPassages[i];
        // [floor, ceiling) selects exactly this field's slice of the sorted term set.
        Term floor = new Term(field, "");
        Term ceiling = new Term(field, UnicodeUtil.BIG_TERM);
        // LUCENENET: Call custom GetViewBetween overload to mimic Java's exclusive upper bound behavior.
        var fieldTerms = queryTerms.GetViewBetween(floor, lowerValueInclusive: true, ceiling, upperValueInclusive: false);
        // TODO: should we have some reasonable defaults for term pruning? (e.g. stopwords)

        // Strip off the redundant field:
        BytesRef[] terms = new BytesRef[fieldTerms.Count];
        int termUpto = 0;
        foreach (Term term in fieldTerms)
        {
            terms[termUpto++] = term.Bytes;
        }
        IDictionary<int, object> fieldHighlights = HighlightField(field, contents[i], GetBreakIterator(field), terms, docids, leaves, numPassages, query);

        // Results are keyed by docid; report them back in the caller's original order.
        object[] result = new object[docids.Length];
        for (int j = 0; j < docidsIn.Length; j++)
        {
            fieldHighlights.TryGetValue(docidsIn[j], out result[j]);
        }
        highlights[field] = result;
    }
    return (highlights);
}
/// <summary>Query node matching a single term at a known position within a phrase.</summary>
/// <param name="term">The term this node matches.</param>
/// <param name="positionInPhrase">Relative position of the term within its phrase.</param>
/// <param name="reader">Reader passed through to the base node.</param>
public TermNode(Term term, int positionInPhrase, AtomicReader reader)
    : base(term, reader)
{
    m_positionInPhrase = positionInPhrase; // relative position in a phrase
}
/// <summary>
/// Builds a calibration-supported interest-rate-swap instrument from a market
/// quote: either directly from an attached trade definition, or synthesized
/// from the market convention rules registered for the quote's index.
/// </summary>
/// <param name="curveDate">Curve build date; synthesized swaps start on the next business day.</param>
/// <param name="rateMktData">Market quote (rate, tenor or maturity, index, optional trade info).</param>
/// <param name="calibMethod">Receives the calibration method configured for the index.</param>
/// <returns>The instrument to calibrate against.</returns>
public static ICalibrationSupportedInstrument CreateIrsInstrument(
    Date curveDate,
    RateMktData rateMktData,
    out MktInstrumentCalibMethod calibMethod)
{
    MktIrsJson irsJson = null;
    ICalibrationSupportedInstrument irs = null;
    if (rateMktData.TradeInfo != null)
    {
        // A concrete trade is attached: build the swap straight from it.
        var irsInfo = (InterestRateSwapInfo)rateMktData.TradeInfo;
        var vf = new InterestRateSwapVf(irsInfo);
        irs = vf.GenerateInstrument();
        irsJson = MktInstrumentIrsRule.MktIrsRule[irsInfo.Index.ToIndexType()];
    }
    else
    {
        // No trade attached: synthesize both legs from the index's market conventions.
        irsJson = MktInstrumentIrsRule.MktIrsRule[rateMktData.IndexType.ToIndexType()];
        var irsInfo = irsJson.InterestRateSwapInfo;
        var calendar = irsInfo.Calendar.ToCalendarImpl();
        var startDate = calendar.NextBizDay(curveDate);
        // NOTE(review): "isTernor" looks like a typo for "isTenor".
        var isTernor = rateMktData.IsTerm();
        var tenor = isTernor ? rateMktData.Tenor : null;
        // Tenor quotes roll forward from the start date; otherwise Tenor carries an explicit date string.
        var maturityDate = isTernor ? new Term(tenor).Next(startDate) : new Date(DateTime.Parse(rateMktData.Tenor));
        // Fixed leg pays (notional sign -1) the quoted fixed rate.
        var fixedLeg = new SwapLeg(startDate, maturityDate, -1.0, false, irsInfo.Currency.ToCurrencyCode(), new FixedCoupon(rateMktData.Rate), calendar, irsInfo.FixedLegFreq.ToFrequency(), irsInfo.FixedLegStub.ToStub(), irsInfo.FixedLegDC.ToDayCountImpl(), irsInfo.FixedLegBD.ToBda());
        var floatingLegFrequency = irsInfo.FloatingLegFreq.ToFrequency();
        var floatingCouponResetTerm = new Term(irsInfo.ResetTerm);
        // A reset term equal to the leg frequency is redundant; null signals "use the leg frequency".
        if (floatingCouponResetTerm.Equals(floatingLegFrequency.GetTerm()))
        {
            floatingCouponResetTerm = null;
        }
        var floatingCoupon = new FloatingCoupon(new Index(rateMktData.IndexType.ToIndexType(), 1, irsInfo.ResetCompound.ToCouponCompound()), calendar, irsInfo.FloatingLegDC.ToDayCountImpl(), 0.0, floatingCouponResetTerm, irsInfo.ResetStub.ToStub(), irsInfo.ResetBD.ToBda(), new DayGap(irsInfo.ResetToFixingGap));
        // Floating leg receives (notional sign +1) the index coupon.
        var floatingLeg = new SwapLeg(startDate, maturityDate, 1.0, false, irsInfo.Currency.ToCurrencyCode(), floatingCoupon, calendar, irsInfo.FloatingLegFreq.ToFrequency(), irsInfo.FloatingLegStub.ToStub(), irsInfo.FloatingLegDC.ToDayCountImpl(), irsInfo.FloatingLegBD.ToBda());
        irs = new InterestRateSwap(fixedLeg, floatingLeg, SwapDirection.Payer, tenor);
    }
    calibMethod = irsJson.CalibrationMethod.ToCalibMethod();
    return (irs);
}
/// <summary>
/// Return a interest rate swap.
/// Looks up the trade by <paramref name="tradeId"/>; when it is not already an
/// interest-rate-swap trade, a new one is created from the supplied parameters
/// (deriving a maturity date from the tenor when none is given and picking the
/// discount/forecast curve name from the index), registered with the trade
/// manager, and returned as label data.
/// </summary>
/// <param name="tradeId"></param>
/// <param name="startDate"></param>
/// <param name="maturityDate"></param>
/// <param name="tenor"></param>
/// <param name="notional"></param>
/// <param name="currency"></param>
/// <param name="swapDirection"></param>
/// <param name="calendar"></param>
/// <param name="fixedLegDayCount"></param>
/// <param name="fixedLegFrequency"></param>
/// <param name="fixedLegBusinessDayConvention"></param>
/// <param name="fixedLegStub"></param>
/// <param name="fixedLegCoupon"></param>
/// <param name="floatingLegDayCount"></param>
/// <param name="floatingLegFrequency"></param>
/// <param name="floatingLegBusinessDayConvention"></param>
/// <param name="floatingLegStub"></param>
/// <param name="index"></param>
/// <param name="resetTerm"></param>
/// <param name="resetStub"></param>
/// <param name="resetBusinessDayConvention"></param>
/// <param name="resetToFixingGap"></param>
/// <param name="resetCompound"></param>
/// <returns></returns>
public static object xl_InterestRateSwap(string tradeId
    , string startDate = null
    , string maturityDate = null
    , string tenor = "1Y"
    , double notional = 100.0
    , string currency = "CNY"
    , string swapDirection = "Payer"
    , string calendar = "chn_ib"
    , string fixedLegDayCount = "Act365"
    , string fixedLegFrequency = "Quarterly"
    , string fixedLegBusinessDayConvention = "ModifiedFollowing"
    , string fixedLegStub = "ShortEnd"
    , double fixedLegCoupon = 0.03
    , string floatingLegDayCount = "Act365"
    , string floatingLegFrequency = "Quarterly"
    , string floatingLegBusinessDayConvention = "ModifiedFollowing"
    , string floatingLegStub = "ShortEnd"
    , string index = "Fr007"
    , string resetTerm = "1W"
    , string resetStub = "ShortEnd"
    , string resetBusinessDayConvention = "None"
    , string resetToFixingGap = "+1BD"
    , string resetCompound = "Compounded"
    )
{
    var interestRateSwapInfo = XlManager.GetTrade(tradeId);
    // Only build a new trade when the id is unknown or bound to another trade type.
    if (!(interestRateSwapInfo is InterestRateSwapInfo))
    {
        startDate = startDate ?? DateTime.Now.ToString("yyyy-MM-dd");
        // Maturity: roll the tenor forward from the start date when no explicit date was given.
        if (maturityDate == null && tenor != null)
        {
            maturityDate = new Term(tenor).Next(startDate.ToDate()).ToString();
        }
        else
        {
            maturityDate = maturityDate ?? new Term("1Y").Next(startDate.ToDate()).ToString();
        }
        // The pricing curve name follows directly from the floating index.
        string curveName = "";
        switch (index)
        {
            case "Fr007":
                curveName = "Fr007SwapCurve";
                break;
            case "Shibor3M":
                curveName = "Shibor3MSwapCurve";
                break;
            case "Shibor1D":
                curveName = "ShiborONSwapCurve";
                break;
            case "Depo1Y":
                curveName = "Depo1YSwapCurve";
                break;
        }
        interestRateSwapInfo = new InterestRateSwapInfo(tradeId)
        {
            StartDate = startDate,
            MaturityDate = maturityDate,
            Tenor = tenor,
            Notional = notional,
            Currency = currency,
            SwapDirection = swapDirection,
            Calendar = calendar,
            FixedLegDC = fixedLegDayCount,
            FixedLegFreq = fixedLegFrequency,
            FixedLegBD = fixedLegBusinessDayConvention,
            FixedLegStub = fixedLegStub,
            FixedLegCoupon = fixedLegCoupon,
            FloatingLegDC = floatingLegDayCount,
            FloatingLegFreq = floatingLegFrequency,
            FloatingLegBD = floatingLegBusinessDayConvention,
            FloatingLegStub = floatingLegStub,
            Index = index,
            ResetTerm = resetTerm,
            ResetStub = resetStub,
            ResetBD = resetBusinessDayConvention,
            ResetToFixingGap = resetToFixingGap,
            ResetCompound = resetCompound,
            // NOTE(review): "ValuationParamters" is a (preexisting) typo in the property name.
            ValuationParamters = new SimpleCfValuationParameters(curveName, curveName, null)
        };
        XlManager.AddTrades(new[] { interestRateSwapInfo });
    }
    return (interestRateSwapInfo.ToTradeInfoInLabelData(null));
}
/// <summary>
/// Validates a provisioned taxonomy term against its definition: resolves the
/// term under the host term or host term set (retrying for SharePoint Online,
/// which is eventually consistent), then asserts that name, description,
/// custom sort order, id, tagging flag, and both custom property bags match.
/// </summary>
/// <param name="modelHost">Either a TermModelHost or a TermSetModelHost.</param>
/// <param name="model">The TaxonomyTermDefinition to validate against.</param>
public override void DeployModel(object modelHost, DefinitionBase model)
{
    var definition = model.WithAssertAndCast<TaxonomyTermDefinition>("model", value => value.RequireNotNull());
    Term spObject = null;
    if (modelHost is TermModelHost)
    {
        var context = (modelHost as TermModelHost).HostClientContext;
        spObject = FindTermInTerm((modelHost as TermModelHost).HostTerm, definition);
        // SPO provisioning is eventually consistent; retry the lookup until the term appears.
        if (spObject == null && IsSharePointOnlineContext(context))
        {
            TryRetryService.TryWithRetry(() =>
            {
                spObject = FindTermInTerm((modelHost as TermModelHost).HostTerm, definition);
                return (spObject != null);
            });
        }
    }
    else if (modelHost is TermSetModelHost)
    {
        var context = (modelHost as TermSetModelHost).HostClientContext;
        spObject = FindTermInTermSet((modelHost as TermSetModelHost).HostTermSet, definition);
        if (spObject == null && IsSharePointOnlineContext(context))
        {
            TryRetryService.TryWithRetry(() =>
            {
                spObject = FindTermInTermSet((modelHost as TermSetModelHost).HostTermSet, definition);
                return (spObject != null);
            });
        }
    }
    else
    {
        throw new SPMeta2UnsupportedModelHostException(string.Format("Model host of type: [{0}] is not supported", modelHost.GetType()));
    }
    TermExtensions.CurrentLCID = definition.LCID;
    var assert = ServiceFactory.AssertService
        .NewAssert(definition, spObject)
        .ShouldNotBeNull(spObject)
        //.ShouldBeEqual(m => m.Name, o => o.Name)
        .ShouldBeEqual(m => m.Description, o => o.GetDefaultLCIDDescription());
    assert.SkipProperty(m => m.LCID, "LCID is not accessible from OM. Should be alright while provision.");
    // Name needs custom comparison because SharePoint normalizes certain characters.
    assert.ShouldBeEqual((p, s, d) =>
    {
        var srcProp = s.GetExpressionValue(m => m.Name);
        var dstProp = d.GetExpressionValue(m => m.Name);
        var isValid = NormalizeTermName(s.Name) == d.Name;
        return (new PropertyValidationResult
        {
            Tag = p.Tag,
            Src = srcProp,
            Dst = dstProp,
            IsValid = isValid
        });
    });
    if (!string.IsNullOrEmpty(definition.CustomSortOrder))
    {
        assert.ShouldBeEqual(m => m.CustomSortOrder, o => o.CustomSortOrder);
    }
    else
    {
        assert.SkipProperty(m => m.CustomSortOrder);
    }
    if (definition.Id.HasValue)
    {
        assert.ShouldBeEqual(m => m.Id, o => o.Id);
    }
    else
    {
        assert.SkipProperty(m => m.Id, "Id is null. Skipping property.");
    }
    if (definition.IsAvailableForTagging.HasValue)
    {
        assert.ShouldBeEqual(m => m.IsAvailableForTagging, o => o.IsAvailableForTagging);
    }
    else
    {
        assert.SkipProperty(m => m.IsAvailableForTagging, "IsAvailableForTagging is null. Skipping property.");
    }
    // Custom properties: counts must match and every defined key/value must be present on the server.
    assert.ShouldBeEqual((p, s, d) =>
    {
        var srcProp = s.GetExpressionValue(m => m.CustomProperties);
        var isValid = true;
        // missed props, or too much
        // should be equal on the first provision
        if (s.CustomProperties.Count != d.CustomProperties.Count)
        {
            isValid = false;
        }
        // per prop
        foreach (var customProp in s.CustomProperties)
        {
            if (!d.CustomProperties.ContainsKey(customProp.Name))
            {
                isValid = false;
                break;
            }
            if (d.CustomProperties[customProp.Name] != customProp.Value)
            {
                isValid = false;
                break;
            }
        }
        return (new PropertyValidationResult
        {
            Tag = p.Tag,
            Src = srcProp,
            // Dst = dstProp,
            IsValid = isValid
        });
    });
    // Local custom properties: validated the same way as the shared bag above.
    assert.ShouldBeEqual((p, s, d) =>
    {
        var srcProp = s.GetExpressionValue(m => m.LocalCustomProperties);
        var isValid = true;
        // missed props, or too much
        // should be equal on the first provision
        if (s.LocalCustomProperties.Count != d.LocalCustomProperties.Count)
        {
            isValid = false;
        }
        // per prop
        foreach (var customProp in s.LocalCustomProperties)
        {
            if (!d.LocalCustomProperties.ContainsKey(customProp.Name))
            {
                isValid = false;
                break;
            }
            if (d.LocalCustomProperties[customProp.Name] != customProp.Value)
            {
                isValid = false;
                break;
            }
        }
        return (new PropertyValidationResult
        {
            Tag = p.Tag,
            Src = srcProp,
            // Dst = dstProp,
            IsValid = isValid
        });
    });
}
/// <summary>
/// Calls <see cref="SlowFuzzyQuery(Term, float)">SlowFuzzyQuery(term, defaultMinSimilarity, 0, defaultMaxExpansions)</see>.
/// </summary>
/// <remarks>
/// NOTE(review): the summary advertises a prefix length of 0 while the code
/// forwards <c>defaultPrefixLength</c>; these agree only if that constant is 0 — confirm.
/// </remarks>
public SlowFuzzyQuery(Term term)
    : this(term, defaultMinSimilarity, defaultPrefixLength, defaultMaxExpansions)
{
}
/// <summary>Identifies a shard's term and version on a particular node.</summary>
public TermAndShardVersion(int nodeID, long version, Term term)
{
    this.term = term;
    this.version = version;
    this.nodeID = nodeID;
}
/// <summary>Forwards the edit request to the underlying taxonomy service.</summary>
public bool EditTerm(Term termToEdit)
{
    return Service.EditTerm(termToEdit);
}
/// <summary>
/// Resolves a taxonomy term described by <paramref name="termModel"/> within
/// the term store, optionally scoped to a term group (by name, by id, or the
/// site-collection group). Lookup is by term id when provided, otherwise by
/// label; when the label search misses, falls back to scanning all terms of
/// <paramref name="termSet"/>. Returns null when nothing matches.
/// </summary>
public static Term LookupTerm(ClientContext clientContext, TermStore termStore, TermSet termSet, TaxonomyFieldDefinition termModel)
{
    var context = clientContext;
    var site = clientContext.Site;
    Term result = null;
    // NOTE(review): "currenGroup" is a preexisting typo for "currentGroup".
    TermGroup currenGroup = null;
    var termGroupName = termModel.TermGroupName;
    var termGroupId = termModel.TermGroupId;
    var isSiteCollectionGroup = termModel.IsSiteCollectionGroup;
    // Resolve the scoping group, if any was specified on the definition.
    if (!string.IsNullOrEmpty(termGroupName))
    {
        currenGroup = termStore.Groups.GetByName(termGroupName);
        context.Load(currenGroup);
        context.ExecuteQueryWithTrace();
    }
    else if (termGroupId != null && termGroupId.HasGuidValue())
    {
        currenGroup = termStore.Groups.GetById(termGroupId.Value);
        context.Load(currenGroup);
        context.ExecuteQueryWithTrace();
    }
    else if (isSiteCollectionGroup == true)
    {
        currenGroup = termStore.GetSiteCollectionGroup(site, false);
        context.Load(currenGroup);
        context.ExecuteQueryWithTrace();
    }
    if (currenGroup != null)
    {
        if (termModel.TermId.HasValue)
        {
            // by ID, the only one match
            // Server-side exception scope: a missing term id must not fault the whole batch.
            var scope = new ExceptionHandlingScope(context);
            using (scope.StartScope())
            {
                using (scope.StartTry())
                {
                    result = termStore.GetTerm(termModel.TermId.Value);
                    context.Load(result);
                }
                using (scope.StartCatch())
                {
                }
            }
            context.ExecuteQueryWithTrace();
        }
        else if (!string.IsNullOrEmpty(termModel.TermName))
        {
            var terms = termStore.GetTerms(new LabelMatchInformation(context)
            {
                Lcid = termModel.TermLCID,
                TermLabel = termModel.TermName,
                TrimUnavailable = false
            });
            context.Load(terms, t => t.Include(
                i => i.Id,
                i => i.Name,
                i => i.TermSet,
                i => i.TermSet.Group,
                i => i.TermSet.Group.Name));
            context.ExecuteQueryWithTrace();
            // Keep only matches that live in the requested group.
            result = terms.FirstOrDefault(t => t.TermSet.Group.Name == currenGroup.Name);
            if ((result == null) && (termSet != null)) // sometimes label match information does not return the term
            {
                var allTerms = termSet.GetAllTerms();
                context.Load(allTerms, t => t.Include(
                    i => i.Id,
                    i => i.Name,
                    i => i.TermSet,
                    i => i.TermSet.Group,
                    i => i.TermSet.Group.Name,
                    i => i.Labels));
                context.ExecuteQueryWithTrace();
                // Fallback: match by label text and language within the requested group.
                result = allTerms.FirstOrDefault(t => (t.TermSet.Group.Name == currenGroup.Name) && (t.Labels.Any(l => l.Value == termModel.TermName && l.Language == termModel.TermLCID)));
            }
        }
    }
    else
    {
        // No group scope: same id/label lookup, but without the group filter.
        if (termModel.TermId.HasValue)
        {
            var scope = new ExceptionHandlingScope(context);
            using (scope.StartScope())
            {
                using (scope.StartTry())
                {
                    result = termStore.GetTerm(termModel.TermId.Value);
                    context.Load(result);
                }
                using (scope.StartCatch())
                {
                }
            }
            context.ExecuteQueryWithTrace();
        }
        else if (!string.IsNullOrEmpty(termModel.TermName))
        {
            var terms = termStore.GetTerms(new LabelMatchInformation(context)
            {
                Lcid = termModel.TermLCID,
                TermLabel = termModel.TermName,
                TrimUnavailable = false
            });
            context.Load(terms);
            context.ExecuteQueryWithTrace();
            result = terms.FirstOrDefault();
            if ((result == null) && (termSet != null)) // sometimes label match information does not return the termset
            {
                var allTerms = termSet.GetAllTerms();
                context.Load(allTerms, t => t.Include(
                    i => i.Id,
                    i => i.Name,
                    i => i.TermSet,
                    i => i.TermSet.Group,
                    i => i.TermSet.Group.Name,
                    i => i.Labels));
                context.ExecuteQueryWithTrace();
                result = allTerms.FirstOrDefault(t => (t.Labels.Any(l => l.Value == termModel.TermName && l.Language == termModel.TermLCID)));
            }
        }
    }
    // Materialize the final result only when the server confirms it exists.
    if (result != null && result.ServerObjectIsNull == false)
    {
        context.Load(result);
        context.ExecuteQueryWithTrace();
        return (result);
    }
    return (null);
}
/// <summary>Forwards the create request to the underlying taxonomy service.</summary>
public bool CreateTerm(Term termToCreate)
{
    return Service.CreateTerm(termToCreate);
}
/// <summary>
/// Compares the fields before checking the text of the terms.
/// </summary>
/// <param name="term">
/// the given term. </param>
/// <param name="termIndex">
/// the term that exists in the data block. </param>
/// <param name="input">
/// the data block. </param>
/// <returns> int. </returns>
/// <exception cref="IOException"> If there is a low-level I/O error. </exception>
private int CompareField(Term term, int termIndex, PagedBytesDataInput input)
{
    // Seek to the indexed term's record; its first vint is the field ordinal.
    input.Position = IndexToDataOffset.Get(termIndex);
    int fieldOrdinal = input.ReadVInt();
    return term.Field().CompareTo(Fields[fieldOrdinal].Field());
}
/// <summary>
/// The compares the given term against the term in the index specified by the
/// term index. ie It returns negative N when term is less than index term;
/// </summary>
/// <param name="term">
/// the given term. </param>
/// <param name="termIndex">
/// the index of the of term to compare. </param>
/// <returns> int. </returns>
/// <exception cref="IOException"> If there is a low-level I/O error. </exception>
public virtual int CompareTo(Term term, int termIndex)
{
    // Clone the input so this comparison does not disturb shared read position.
    var input = (PagedBytesDataInput)DataInput.Clone();
    return CompareTo(term, termIndex, input, new BytesRef());
}
/// <summary>Root of the node tree: stores the source URI and creates the glossary child node.</summary>
public RootNode(string title, Uri uri, Term term)
    : base(title, term)
{
    _uri = uri;
    GlossaryNode = new GlossaryNode(this);
}
/// <summary>A term paired with a numeric multiplier.</summary>
public MulTerm(Term term, Num num)
{
    _num = num;
    _term = term;
}
/// <summary>
/// Generates <c>count_rules</c> random fuzzy rules and appends them (with their
/// randomly parameterized terms) to the first rule database of the given system.
/// </summary>
/// <param name="Approximate">System to populate; the same instance is returned after mutation.</param>
/// <param name="config">Expected to be a <see cref="Generator_Rulles_simple_random_conf"/> carrying the term type, term-type mode and rule count.</param>
/// <returns>The mutated <paramref name="Approximate"/> instance.</returns>
public override a_Fuzzy_System Generate(Fuzzy_system.Approx_Singletone.a_Fuzzy_System Approximate, Abstract_generator_conf config)
{
    // NOTE(review): a fresh Random per call can repeat seeds on rapid successive
    // calls (pre-.NET Core behaviour); consider injecting the RNG if
    // reproducibility matters.
    Random rand = new Random();
    a_Fuzzy_System result = Approximate;

    // Ensure there is at least one rule database to append to.
    if (result.Count_Rulles_Databases == 0)
    {
        Knowlege_base_ARules temp_rules = new Knowlege_base_ARules();
        result.Rulles_Database_Set.Add(temp_rules);
    }

    type_term = ((Generator_Rulles_simple_random_conf)config).Функция_принадлежности;
    stable_terms = (int)((Generator_Rulles_simple_random_conf)config).Тип_Термов;
    count_rules = ((Generator_Rulles_simple_random_conf)config).Количество_правил;

    for (int j = 0; j < count_rules; j++)
    {
        int[] order = new int[result.Count_Vars];
        Type_Term_Func_Enum temp_type_term;
        if (stable_terms == 0)
        {
            // Fixed membership-function type taken from the configuration.
            temp_type_term = type_term;
        }
        else
        {
            // Randomly chosen membership-function type per rule.
            temp_type_term = Generator_type_term();
        }

        List<Term> temp_term_list = new List<Term>();
        for (int k = 0; k < result.Count_Vars; k++)
        {
            // Hoist the per-attribute bounds: the original recomputed
            // Attribute_Min/Attribute_Max for every random parameter.
            double min = result.Learn_Samples_set.Attribute_Min(k);
            double range = result.Learn_Samples_set.Attribute_Max(k) - min;

            double[] parametrs = new double[Member_Function.Count_Params_For_Term(temp_type_term)];
            switch (temp_type_term)
            {
                case Type_Term_Func_Enum.Треугольник: // triangle: 3 sorted support points
                    parametrs[0] = min + rand.NextDouble() * range;
                    parametrs[1] = min + rand.NextDouble() * range;
                    parametrs[2] = min + rand.NextDouble() * range;
                    Array.Sort(parametrs);
                    break;
                case Type_Term_Func_Enum.Гауссоида: // gaussian: center + strictly positive width
                    parametrs[0] = min + rand.NextDouble() * range;
                    parametrs[1] = (rand.NextDouble() + 0.01) * 0.5 * range;
                    break;
                case Type_Term_Func_Enum.Парабола: // parabola: 2 sorted bounds
                    parametrs[0] = min + rand.NextDouble() * range;
                    parametrs[1] = min + rand.NextDouble() * range;
                    Array.Sort(parametrs);
                    break;
                case Type_Term_Func_Enum.Трапеция: // trapezoid: 4 sorted support points
                    parametrs[0] = min + rand.NextDouble() * range;
                    parametrs[1] = min + rand.NextDouble() * range;
                    parametrs[2] = min + rand.NextDouble() * range;
                    parametrs[3] = min + rand.NextDouble() * range;
                    Array.Sort(parametrs);
                    break;
            }

            Term temp_term = new Term(parametrs, temp_type_term, k);
            result.Rulles_Database_Set[0].Terms_Set.Add(temp_term);
            temp_term_list.Add(temp_term);
            // Remember the index of the term just appended to the shared set.
            order[k] = result.Rulles_Database_Set[0].Terms_Set.Count - 1;
        }

        // Consequent value approximated from the generated antecedent terms.
        double approx_Value = result.Nearest_Approx(temp_term_list);
        ARule temp_Rule = new ARule(result.Rulles_Database_Set[0].Terms_Set, order, approx_Value);
        result.Rulles_Database_Set[0].Rules_Database.Add(temp_Rule);
    }

    result.unlaid_protection_fix();
    // NOTE(review): forcing a collection here is almost never beneficial;
    // kept only to preserve the original behaviour.
    GC.Collect();
    return result;
}
/// <summary>
/// Initializes a subtraction sub-expression from an expression and a term.
/// </summary>
/// <param name="expr">The expression operand.</param>
/// <param name="term">The term operand.</param>
public SubExpression(Expression expr, Term term)
{
    _term = term;
    _expr = expr;
}
/// <summary>
/// Initializes a division term pairing a term with a numeric divisor.
/// </summary>
/// <param name="term">The term operand.</param>
/// <param name="num">The numeric operand.</param>
public DivTerm(Term term, Num num)
{
    _num = num;
    _term = term;
}
/// <summary>
/// Creates a row view-model for the given term and appends it to the
/// contained rows.
/// </summary>
/// <param name="term">
/// The term.
/// </param>
private void AddTermRowViewModel(Term term)
{
    this.ContainedRows.Add(new TermRowViewModel(term, this.Session, this));
}
/// <summary>
/// Extension point for derived classes to append optional arguments to
/// <paramref name="updateTerm"/>. The base implementation intentionally
/// does nothing.
/// </summary>
/// <param name="updateTerm">The term being built up with arguments.</param>
protected virtual void AddOptionalArguments(Term updateTerm) { }
/// <summary>
/// Grammar reduction: an expression consisting of a single term takes that
/// term's value.
/// </summary>
/// <param name="result">The expression receiving the value.</param>
/// <param name="term">The term whose value is propagated.</param>
public static void Rule(Expression result, Term term) { result.Value = term.Value; }
// Random rnd is passed in so that the exact same random query may be created
// more than once.
/// <summary>
/// Recursively builds a random <see cref="BooleanQuery"/> over <paramref name="vals"/>:
/// each clause is a term, phrase or wildcard query, or (while <paramref name="level"/> &gt; 0)
/// a nested random boolean query; occurrence is randomized among MUST_NOT/MUST/SHOULD.
/// </summary>
/// <param name="rnd">Seeded source of randomness; reuse the seed to reproduce a query.</param>
/// <param name="allowMust">When false, MUST clauses are downgraded to SHOULD.</param>
/// <param name="level">Remaining nesting depth for sub-queries.</param>
/// <param name="field">Field name used for all generated terms.</param>
/// <param name="vals">Pool of term texts to draw from.</param>
/// <param name="cb">Optional callback invoked on every query created (including nested ones).</param>
/// <returns>The generated boolean query.</returns>
public static BooleanQuery RandBoolQuery(Random rnd, bool allowMust, int level, string field, string[] vals, Callback cb)
{
    // BUGFIX: the original passed "rnd.Next() < 0", which is always false because
    // .NET's Random.Next() never returns a negative value (unlike Java's
    // nextInt(), which this was ported from). Use a fair coin flip so the
    // coord-disabling flag is actually randomized.
    BooleanQuery current = new BooleanQuery(rnd.Next(2) == 0);
    for (int i = 0; i < rnd.Next(vals.Length) + 1; i++)
    {
        int qType = 0; // term query
        if (level > 0)
        {
            qType = rnd.Next(10);
        }
        Query q;
        if (qType < 3)
        {
            q = new TermQuery(new Term(field, vals[rnd.Next(vals.Length)]));
        }
        else if (qType < 4)
        {
            Term t1 = new Term(field, vals[rnd.Next(vals.Length)]);
            Term t2 = new Term(field, vals[rnd.Next(vals.Length)]);
            PhraseQuery pq = new PhraseQuery();
            pq.Add(t1);
            pq.Add(t2);
            pq.Slop = 10; // increase possibility of matching
            q = pq;
        }
        else if (qType < 7)
        {
            q = new WildcardQuery(new Term(field, "w*"));
        }
        else
        {
            q = RandBoolQuery(rnd, allowMust, level - 1, field, vals, cb);
        }

        // ~20% MUST_NOT, ~30% MUST (when allowed), rest SHOULD.
        int r = rnd.Next(10);
        Occur occur;
        if (r < 2)
        {
            occur = Occur.MUST_NOT;
        }
        else if (r < 5)
        {
            occur = allowMust ? Occur.MUST : Occur.SHOULD;
        }
        else
        {
            occur = Occur.SHOULD;
        }
        current.Add(q, occur);
    }
    if (cb != null)
    {
        cb.PostCreate(current);
    }
    return current;
}
/// <summary>
/// Construct a <see cref="SpanTermQuery"/> matching the named term's spans.
/// </summary>
/// <param name="term">The term whose spans this query matches.</param>
public SpanTermQuery(Term term)
{
    m_term = term;
}
/// <summary>
/// Converts a <see cref="Hashtable"/> of field name/value pairs into strongly
/// typed field values for the given list, resolving users, taxonomy terms and
/// lookups; all other field types pass the raw value through.
/// </summary>
/// <param name="list">List whose field definitions drive the conversion; keys match on internal name or title.</param>
/// <param name="valuesToSet">Raw values keyed by field name; may be null (treated as empty).</param>
/// <param name="clientContext">CSOM context used to resolve taxonomy sessions and terms.</param>
/// <returns>Field name to converted value.</returns>
/// <exception cref="PSInvalidOperationException">
/// A key matches no field in the list, or a taxonomy term cannot be found.
/// </exception>
public static Dictionary<string, object> GetFieldValues(PnP.Core.Model.SharePoint.IList list, Hashtable valuesToSet, ClientContext clientContext)
{
    var item = new Dictionary<string, object>();
    // xxx: return early if hashtable is empty to save getting fields?
    var fields = list.Fields;
    Hashtable values = valuesToSet ?? new Hashtable();
    foreach (var key in values.Keys)
    {
        var field = fields.FirstOrDefault(f => f.InternalName == key as string || f.Title == key as string);
        if (field != null)
        {
            switch (field.TypeAsString)
            {
                case "User":
                case "UserMulti":
                {
                    var userValueCollection = field.NewFieldValueCollection();
                    var value = values[key];
                    // Null/blank values fall through to the raw pass-through.
                    if (value == null)
                    {
                        goto default;
                    }
                    if (value is string && string.IsNullOrWhiteSpace(value + ""))
                    {
                        goto default;
                    }
                    if (value.GetType().IsArray)
                    {
                        foreach (var arrayItem in (value as IEnumerable))
                        {
                            int userId;
                            // Non-numeric entries are treated as login names to resolve.
                            if (!int.TryParse(arrayItem.ToString(), out userId))
                            {
                                var user = list.PnPContext.Web.EnsureUser(arrayItem as string);
                                userValueCollection.Values.Add(field.NewFieldUserValue(user));
                            }
                            else
                            {
                                userValueCollection.Values.Add(field.NewFieldUserValue(userId));
                            }
                        }
                        item[key as string] = userValueCollection;
                    }
                    else
                    {
                        int userId;
                        if (!int.TryParse(value as string, out userId))
                        {
                            var user = list.PnPContext.Web.EnsureUser(value as string);
                            item[key as string] = field.NewFieldUserValue(user);
                        }
                        else
                        {
                            item[key as string] = field.NewFieldUserValue(userId);
                        }
                    }
                    break;
                }
                case "TaxonomyFieldType":
                case "TaxonomyFieldTypeMulti":
                {
                    var value = values[key];
                    if (value != null && value.GetType().IsArray)
                    {
                        var fieldValueCollection = field.NewFieldValueCollection();
                        var taxSession = clientContext.Site.GetTaxonomySession();
                        foreach (var arrayItem in value as object[])
                        {
                            Term taxonomyItem;
                            Guid termGuid;
                            var label = string.Empty;
                            if (!Guid.TryParse(arrayItem as string, out termGuid))
                            {
                                // Assume it's a TermPath
                                taxonomyItem = clientContext.Site.GetTaxonomyItemByPath(arrayItem as string) as Term;
                                if (taxonomyItem == null)
                                {
                                    throw new PSInvalidOperationException($"Cannot find term {arrayItem}");
                                }
                                var labelResult = taxonomyItem.GetDefaultLabel(CultureInfo.CurrentCulture.LCID);
                                clientContext.ExecuteQueryRetry();
                                label = labelResult.Value;
                            }
                            else
                            {
                                taxonomyItem = taxSession.GetTerm(termGuid);
                                if (taxonomyItem == null)
                                {
                                    throw new PSInvalidOperationException($"Cannot find term {arrayItem}");
                                }
                                var labelResult = taxonomyItem.GetDefaultLabel(CultureInfo.CurrentCulture.LCID);
                                clientContext.Load(taxonomyItem);
                                clientContext.ExecuteQueryRetry();
                                label = labelResult.Value;
                            }
                            fieldValueCollection.Values.Add(field.NewFieldTaxonomyValue(taxonomyItem.Id, label));
                        }
                        item[key as string] = fieldValueCollection;
                    }
                    else
                    {
                        // BUGFIX: the original dereferenced a null taxonomyItem when the
                        // value was null; treat null like the other field types instead
                        // (raw pass-through).
                        if (value == null)
                        {
                            goto default;
                        }
                        Guid termGuid = Guid.Empty;
                        var taxSession = clientContext.Site.GetTaxonomySession();
                        Term taxonomyItem;
                        var label = string.Empty;
                        if (!Guid.TryParse(value as string, out termGuid))
                        {
                            // Assume it's a TermPath
                            taxonomyItem = clientContext.Site.GetTaxonomyItemByPath(value as string) as Term;
                            // BUGFIX: fail with the same message as the multi-value branch
                            // instead of a NullReferenceException when the path is unknown.
                            if (taxonomyItem == null)
                            {
                                throw new PSInvalidOperationException($"Cannot find term {value}");
                            }
                            var labelResult = taxonomyItem.GetDefaultLabel(CultureInfo.CurrentCulture.LCID);
                            clientContext.ExecuteQueryRetry();
                            label = labelResult.Value;
                        }
                        else
                        {
                            taxonomyItem = taxSession.GetTerm(termGuid);
                            // BUGFIX: mirror the multi-value branch's null check.
                            if (taxonomyItem == null)
                            {
                                throw new PSInvalidOperationException($"Cannot find term {value}");
                            }
                            var labelResult = taxonomyItem.GetDefaultLabel(CultureInfo.CurrentCulture.LCID);
                            clientContext.Load(taxonomyItem);
                            clientContext.ExecuteQueryRetry();
                            label = labelResult.Value;
                        }
                        item[key as string] = field.NewFieldTaxonomyValue(taxonomyItem.Id, label);
                    }
                    break;
                }
                case "Lookup":
                case "LookupMulti":
                {
                    var value = values[key];
                    if (value == null)
                    {
                        goto default;
                    }
                    if (value is Array)
                    {
                        var fieldValueCollection = field.NewFieldValueCollection();
                        var arr = (object[])values[key];
                        for (int i = 0; i < arr.Length; i++)
                        {
                            var arrayValue = arr[i].ToString();
                            fieldValueCollection.Values.Add(field.NewFieldLookupValue(int.Parse(arrayValue)));
                        }
                        item[key as string] = fieldValueCollection;
                    }
                    else
                    {
                        // A scalar value may still hold several ids separated by ',' or ';'.
                        var fieldValueCollection = field.NewFieldValueCollection();
                        string valStr = values[key].ToString();
                        int[] multiValue = valStr.Split(',', ';').Select(int.Parse).ToArray();
                        if (multiValue.Length > 1)
                        {
                            for (int i = 0; i < multiValue.Length; i++)
                            {
                                fieldValueCollection.Values.Add(field.NewFieldLookupValue(multiValue[i]));
                            }
                            item[key as string] = fieldValueCollection;
                        }
                        else
                        {
                            item[key as string] = field.NewFieldLookupValue(multiValue[0]);
                        }
                    }
                    break;
                }
                default:
                {
                    item[key as string] = values[key];
                    break;
                }
            }
        }
        else
        {
            throw new PSInvalidOperationException($"Field {key} not present in list.");
        }
    }
    return item;
}
/// <summary>
/// Grammar reduction: applies the binary operator's function to the left
/// expression's value and the term's value, storing the result.
/// </summary>
/// <param name="result">The expression receiving the computed value.</param>
/// <param name="expression">The left-hand operand.</param>
/// <param name="op">The operator whose function combines the two values.</param>
/// <param name="term">The right-hand operand.</param>
public static void Rule(Expression result, Expression expression, ExpressionOperator op, Term term) { result.Value = op.Function(expression.Value, term.Value); }
/// <summary>
/// Convenience constructor: delegates to the three-argument overload with 0
/// as the middle argument (its meaning is defined by that overload, which is
/// not visible here — presumably a default frequency/flag; confirm there).
/// </summary>
public TermNode(Term term, AtomicReader reader) : this(term, 0, reader) { }