Exemplo n.º 1
0
        /// <summary>
        /// Maps a property as a numeric field.
        /// see http://www.elasticsearch.org/guide/reference/mapping/core-types.html
        /// </summary>
        public Properties <T> Number <TField>(Expression <Func <T, TField> > field, Func <NumberMap <T>, NumberMap <T> > numberProperty = null)
        {
            // Resolve the mapped member's property path from the expression.
            var path = field.GetPropertyPath();

            // Combine the field name/CLR type with the caller's optional configuration.
            var map = numberProperty.Bind(m => m.Field(path, typeof(TField)));
            RegisterMapAsJson(map);
            return this;
        }
Exemplo n.º 2
0
        /// <summary>
        /// Maps a property using a caller-defined custom property mapping.
        /// </summary>
        public Properties <T> CustomProperty <TField>(Expression <Func <T, TField> > field, Func <CustomPropertyMap <T>, CustomPropertyMap <T> > customProperty = null)
        {
            // Resolve the mapped member's property path from the expression.
            var path = field.GetPropertyPath();

            // Combine the field name/CLR type with the caller's optional configuration.
            var map = customProperty.Bind(m => m.Field(path, typeof(TField)));
            RegisterMapAsJson(map);
            return this;
        }
Exemplo n.º 3
0
        internal static void _main()
        {
            // Two "readers": each takes the protagonist's name and yields one line of text.
            Func <string, string> openingLine
                = name => $"First, {name} looked up";

            Func <string, string> followUpLine
                = name => $"Then, {name} turned to me...";

            // Compose the readers with LINQ query syntax.
            var reader = from first in openingLine
                         from second in followUpLine
                         select $"{first}\n{second}";

            // The same composition spelled out with explicit Bind/Map calls.
            var reader1 = openingLine
                          .Bind(first => followUpLine
                                .Map(second => $"{first}\n{second}"));

            var story = reader("Tamerlano");

            // => First, Tamerlano looked up
            //    Then, Tamerlano turned to me...
            WriteLine(story);
        }
        private Func <Domain> CreateDomainBuilder(UpgradeContext context)
        {
            // Partially apply DomainBuilder.Run to the prepared configuration,
            // giving the caller a parameterless Domain factory.
            Func <DomainBuilderConfiguration, Domain> run = DomainBuilder.Run;
            var configuration = BuildBuilderConfiguration(context);

            return run.Bind(configuration);
        }
Exemplo n.º 5
0
        public void FirstTestName()
        {
            // Two single-step conversions.
            Func <A, B> makeB = a => new B(a);
            Func <B, C> makeC = b => new C(b);

            // Bind chains them into one A -> C conversion.
            Func <A, C> makeCFromA = makeB.Bind(makeC);

            C c = makeCFromA(new A());
        }
Exemplo n.º 6
0
		public void CallNow()
		{
			// A clock frozen two years in the past.
			var frozen = DateTime.Now.AddYears(-2);
			var frozenClock = new Func<DateTime>(() => frozen);

			// While the binding is in effect, SystemTime.UtcNow reports the frozen value...
			using (frozenClock.Bind(() => SystemTime.UtcNow))
			{
				Assert.AreEqual(frozen, SystemTime.UtcNow());
			}

			// ...and reverts once the binding is disposed.
			Assert.AreNotEqual(frozen, SystemTime.UtcNow());
		}
Exemplo n.º 7
0
        static void Main(string[] args)
        {
            // --- Currying ---
            Func <int, int, int> product = (x, y) => x * y;

            // Fix the first argument to 2; the result is a one-argument function.
            var doubler = product.Bind(2);

            Console.WriteLine(doubler(2)); // prints "4"

            // --- Method chaining ---
            Action <string> print = Console.WriteLine;

            // Fuse two bound calls into a single Action delegate.
            var greet = print.Bind("Hello, ")
                        .Then(print.Bind("World!"));

            greet();

            // --- Sequencing: several functions over the same input become one
            //     function returning multiple outputs ---
            Func <int, int> increment = n => n + 1;
            var             pipeline = new[] { increment, doubler }.Sequence();

            Show(pipeline(10)); // writes "11,20"

            // --- Composition with LINQ query syntax ---
            var combined =
                from next in increment
                from dbl in doubler
                select Tuple.Create(next, dbl);

            var describe = combined.Select(pair => pair.ToString());

            print(describe(10)); // writes "(11, 20)"

            // --- Partial application ---
            Func <int, int, int, int> sumOfThree = (x, y, z) => x + y + z;

            // Partial<TDelegate>() leaves the middle parameter open
            // and fixes the outer two parameters to 2.
            var addTwoTwice = sumOfThree.Partial <Func <int, int> >(2, Args.Open, 2);
            var total       = addTwoTwice(1);

            print(total.ToString(CultureInfo.InvariantCulture));

            Console.ReadKey();
        }
Exemplo n.º 8
0
        private Func <Domain> CreateBuilder(UpgradeStage stage)
        {
            // Assemble a per-stage configuration from the ambient upgrade context.
            var configuration = new DomainBuilderConfiguration {
                DomainConfiguration  = context.Configuration,
                Stage                = stage,
                Services             = context.Services,
                ModelFilter          = new StageModelFilter(context.UpgradeHandlers, stage),
                UpgradeContextCookie = context.Cookie,
                RecycledDefinitions  = context.RecycledDefinitions,
                DefaultSchemaInfo    = defaultSchemaInfo
            };

            // Lock the configuration before handing it off.
            configuration.Lock();

            // Partially apply DomainBuilder.Run so the caller gets a parameterless factory.
            Func <DomainBuilderConfiguration, Domain> run = DomainBuilder.Run;
            return run.Bind(configuration);
        }
 /// <summary>
 /// Adds a shingle token filter, which builds token n-grams — i.e. it emits
 /// combinations of adjacent tokens as single tokens.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/shingle-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Shingle(string name, Func <ShingleTokenFilter, ShingleTokenFilter> shingle = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = shingle.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a token filter that stems words using a Snowball-generated stemmer.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/snowball-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Snowball(string name, Func <SnowballTokenFilter, SnowballTokenFilter> snowball = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = snowball.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a porterStem token filter that stems tokens per the Porter algorithm.
 /// Note: input must already be lower case, so place a Lower Case Token Filter
 /// or Lower Case Tokenizer earlier in the chain for correct results.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/porterstem-tokenfilter.html
 /// </summary>
 public TokenFilterSettings PorterStem(string name, Func <PorterStemTokenFilter, PorterStemTokenFilter> porterStem = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = porterStem.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a reverse token filter that simply emits each token reversed.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/reverse-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Reverse(string name, Func <ReverseTokenFilter, ReverseTokenFilter> reverse = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = reverse.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
Exemplo n.º 13
0
 /// <summary>
 /// Adds a custom analyzer that combines a tokenizer with zero or more token filters
 /// and zero or more char filters, all referenced by their logical/registered names.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/custom-analyzer.html
 /// </summary>
 public AnalyzerSettings Custom(string name, Func <CustomAnalyzer, CustomAnalyzer> custom)
 {
     // Compose the registered name with the caller's configuration.
     var configured = custom.Bind(settings => settings.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds the phonetic analysis token filter (plugin).
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/phonetic-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Phonetic(string name, Func <PhoneticTokenFilter, PhoneticTokenFilter> phonetic = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = phonetic.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
Exemplo n.º 15
0
 /// <summary>
 /// Adds a pattern analyzer that flexibly separates text into terms via a regular expression.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/pattern-analyzer.html
 /// </summary>
 public AnalyzerSettings Pattern(string name, Func <PatternAnalyzer, PatternAnalyzer> pattern = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = pattern.Bind(settings => settings.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a synonym token filter for handling synonyms during analysis.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/synonym-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Synonym(string name, Func <SynonymTokenFilter, SynonymTokenFilter> synonym = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = synonym.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a unique token filter so only unique tokens are indexed during analysis.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/unique-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Unique(string name, Func <UniqueTokenFilter, UniqueTokenFilter> unique = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = unique.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds an edgeNGram token filter that builds N-character substrings
 /// from one side of the text.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/edgengram-tokenfilter.html
 /// </summary>
 public TokenFilterSettings EdgeNGram(string name, Func <EdgeNGramTokenFilter, EdgeNGramTokenFilter> edgeNGram = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = edgeNGram.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
Exemplo n.º 19
0
 /// <summary>
 /// Adds one of the language analyzers aimed at analyzing specific language text.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/lang-analyzer.html
 /// </summary>
 public AnalyzerSettings Language(string name, Func <LanguageAnalyzer, LanguageAnalyzer> language)
 {
     // Compose the registered name with the caller's configuration.
     var configured = language.Bind(settings => settings.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
Exemplo n.º 20
0
 /// <summary>
 /// Adds a whitespace analyzer, built from a Whitespace Tokenizer.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/whitespace-analyzer.html
 /// </summary>
 public AnalyzerSettings Whitespace(string name, Func <WhitespaceAnalyzer, WhitespaceAnalyzer> whitespace = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = whitespace.Bind(settings => settings.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
Exemplo n.º 21
0
 /// <summary>
 /// Adds a simple analyzer, built from a Lower Case Tokenizer.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/simple-analyzer.html
 /// </summary>
 public AnalyzerSettings Simple(string name, Func <SimpleAnalyzer, SimpleAnalyzer> simple = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = simple.Bind(settings => settings.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a dictionary_decompounder token filter for decomposing compound words.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/compound-word-tokenfilter.html
 /// </summary>
 public TokenFilterSettings DictionaryDecompounder(string name, Func <DictionaryDecompounderTokenFilter, DictionaryDecompounderTokenFilter> dictionaryDecompounder = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = dictionaryDecompounder.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a lowercase token filter that normalizes token text to lower case.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/lowercase-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Lowercase(string name, Func <LowercaseTokenFilter, LowercaseTokenFilter> lowercase = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = lowercase.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a stop token filter that removes stop words from token streams.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/stop-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Stop(string name, Func <StopTokenFilter, StopTokenFilter> stop = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = stop.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a pattern_replace token filter for regex-based string replacements.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/pattern_replace-tokenfilter.html
 /// </summary>
 public TokenFilterSettings PatternReplace(string name, Func <PatternReplaceTokenFilter, PatternReplaceTokenFilter> patternReplace = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = patternReplace.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a truncate token filter used to truncate tokens to a specific length.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/truncate-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Truncate(string name, Func <TruncateTokenFilter, TruncateTokenFilter> truncate = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = truncate.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
Exemplo n.º 27
0
 /// <summary>
 /// Adds a snowball analyzer: standard tokenizer plus standard, lowercase,
 /// stop, and snowball filters. A Lucene stemming analyzer originally based on
 /// the snowball project from snowball.tartarus.org.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/snowball-analyzer.html
 /// </summary>
 public AnalyzerSettings Snowball(string name, Func <SnowballAnalyzer, SnowballAnalyzer> snowball = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = snowball.Bind(settings => settings.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a word_delimiter token filter that splits words into subwords and
 /// performs optional transformations on subword groups.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/word-delimiter-tokenfilter.html
 /// </summary>
 public TokenFilterSettings WordDelimiter(string name, Func <WordDelimiterTokenFilter, WordDelimiterTokenFilter> wordDelimiter = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = wordDelimiter.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds a length token filter that removes words that are too long or too short.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/length-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Length(string name, Func <LengthTokenFilter, LengthTokenFilter> length = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = length.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds an elision token filter that removes elisions: e.g. "l'avion"
 /// (the plane) is tokenized as "avion" (plane).
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/elision-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Elision(string name, Func <ElisionTokenFilter, ElisionTokenFilter> elision = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = elision.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }
 /// <summary>
 /// Adds an asciifolding token filter that converts alphabetic, numeric, and
 /// symbolic Unicode characters outside the first 127 ASCII characters
 /// (the "Basic Latin" Unicode block) into their ASCII equivalents, if any.
 /// see http://www.elasticsearch.org/guide/reference/index-modules/analysis/asciifolding-tokenfilter.html
 /// </summary>
 public TokenFilterSettings Asciifolding(string name, Func <AsciifoldingTokenFilter, AsciifoldingTokenFilter> asciifolding = null)
 {
     // Compose the registered name with the caller's optional configuration.
     var configured = asciifolding.Bind(filter => filter.Name(name));
     RegisterJsonPartExpression(configured);
     return this;
 }