        /// <inheritdoc/>
        public void Process(IVersionContext context)
        {
            Assert.ArgumentNotNull(context, nameof(context));

            var rules = new ITokenRule<string>[]
            {
                new HeightTokenRule(true),
                ShortShaTokenRule.Instance,
                BranchNameTokenRule.Instance,
                ShortBranchNameTokenRule.Instance,
                BranchNameSuffixTokenRule.Instance
            };

            var labelParts = context.Configuration.Label.ApplyTokenRules(context, rules);
            var label      = string.Join("-", labelParts).ResolveTokenRules(context, rules);

            var format = context.Result.Version;

            if (!string.IsNullOrWhiteSpace(label))
            {
                format += $"-{label}";
            }

            context.Result.Formats[FormatKey] = format;
        }

        /// <inheritdoc/>
        public void Apply(IVersionContext context)
        {
            var rules = new ITokenRule<string>[]
            {
                HeightTokenRule.Instance,
                ShortShaTokenRule.Instance,
                BranchNameTokenRule.Instance,
                ShortBranchNameTokenRule.Instance,
                BranchNameSuffixTokenRule.Instance
            };

            var labelParts = context.Configuration.Label.ApplyTokenRules(context, rules);
            var label      = string.Join(".", labelParts).ResolveTokenRules(context, rules);
            var meta       = string.Join(".", context.Configuration.Metadata).ResolveTokenRules(context, rules);

            var format = context.Result.Version;

            if (!string.IsNullOrWhiteSpace(label))
            {
                format += $"-{label}";
            }

            if (!string.IsNullOrWhiteSpace(meta))
            {
                format += $"+{meta}";
            }

            context.Result.Formats[FormatKey] = format;
        }
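
Both methods build a SemVer 2.0-style string from the calculated version: the resolved label parts become the pre-release section appended after "-" (Process joins them with "-", Apply with "."), and Apply also appends the resolved metadata parts after "+". A minimal sketch of that composition rule, using illustrative names that are not part of the original API:

public static class VersionFormatSketch
{
    // Illustrative only: shows how the label and metadata sections compose,
    // mirroring the Apply method above (parts assumed already resolved).
    public static string Compose(string version, string[] labelParts, string[] metadataParts)
    {
        var format = version;

        var label = string.Join(".", labelParts);
        if (!string.IsNullOrWhiteSpace(label))
        {
            format += $"-{label}";
        }

        var meta = string.Join(".", metadataParts);
        if (!string.IsNullOrWhiteSpace(meta))
        {
            format += $"+{meta}";
        }

        return format;
    }
}

// VersionFormatSketch.Compose("1.2.3", new[] { "alpha", "4" }, new[] { "sha", "6d8e7f2" })
//   => "1.2.3-alpha.4+sha.6d8e7f2"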
Example #3
            public object ParseValue(TextTokenizer tokenizer, ITokenRule endToken)
            {
                var required = endToken == null
                    ? new[] { word, text, startObject, startArray }
                    : new[] { word, text, startObject, startArray, endToken };

                var token = tokenizer.NextToken(required: required);

                if (token.Rule == word)
                {
                    // Bare words are either booleans or numbers.
                    if (bool.TryParse(token.Text, out var boolValue))
                    {
                        return boolValue;
                    }

                    if (double.TryParse(token.Text, out var numberValue))
                    {
                        return numberValue;
                    }

                    throw new NotSupportedException($"Unsupported token: '{token.Text}'.");
                }
                else if (token.Rule == text)
                {
                    return token.Text;
                }
                else if (token.Rule == startArray)
                {
                    var list = new List<object>();

                    object val = null;

                    // The recursive call returns null when it reads the closing endArray
                    // token, which ends the loop (this covers the empty-array case).
                    while ((val = ParseValue(tokenizer, endArray)) != null)
                    {
                        list.Add(val);

                        // After each element, expect either a separator or the end of the array.
                        if (tokenizer.NextToken(required: new[] { endArray, comma }).Rule == endArray)
                        {
                            break;
                        }
                    }

                    return list.ToArray();
                }
                else if (token.Rule == startObject)
                {
                    var obj = new Dictionary<string, object>();

                    do
                    {
                        // Either a property name or the end of the object.
                        var prop = tokenizer.NextToken(new[] { word, endObject });

                        if (prop.Rule == endObject)
                        {
                            break;
                        }

                        tokenizer.NextToken(new[] { colon });

                        obj.Add(prop.Text, ParseValue(tokenizer, null));

                        token = tokenizer.NextToken(new[] { comma, endObject });
                    } while (token.Rule != endObject);

                    return obj;
                }

                // Only reachable when the supplied endToken was read: signals no further value.
                return null;
            }
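
ParseValue above is a small recursive-descent reader over a token stream: bare words become booleans or numbers, quoted text becomes a string, and startArray/startObject recurse into object[] and Dictionary<string, object> values. The self-contained sketch below mirrors that structure against a hypothetical pre-tokenized queue instead of a TextTokenizer, so it runs on its own; every name in it is illustrative rather than the original API.

using System;
using System.Collections.Generic;

// Hypothetical stand-in: tokens arrive as pre-split (Rule, Text) pairs in a queue
// rather than from a TextTokenizer, so the same recursive-descent shape can run alone.
public static class MiniValueParser
{
    public static object ParseValue(Queue<(string Rule, string Text)> tokens)
    {
        var token = tokens.Dequeue();

        switch (token.Rule)
        {
            case "word":
                // Bare words are booleans or numbers, mirroring the word branch above.
                if (bool.TryParse(token.Text, out var boolValue)) return boolValue;
                if (double.TryParse(token.Text, out var numberValue)) return numberValue;
                throw new NotSupportedException($"Unsupported token: '{token.Text}'.");

            case "text":
                return token.Text; // quoted string literal

            case "startArray":
            {
                var list = new List<object>();

                while (tokens.Peek().Rule != "endArray")
                {
                    list.Add(ParseValue(tokens));          // recurse for each element

                    if (tokens.Peek().Rule == "comma")
                    {
                        tokens.Dequeue();                  // skip the separator
                    }
                }

                tokens.Dequeue();                          // consume endArray
                return list.ToArray();
            }

            case "startObject":
            {
                var obj = new Dictionary<string, object>();

                while (tokens.Peek().Rule != "endObject")
                {
                    var prop = tokens.Dequeue();           // property name
                    tokens.Dequeue();                      // colon
                    obj.Add(prop.Text, ParseValue(tokens));

                    if (tokens.Peek().Rule == "comma")
                    {
                        tokens.Dequeue();
                    }
                }

                tokens.Dequeue();                          // consume endObject
                return obj;
            }

            default:
                return null;
        }
    }
}

For example, the token stream for { size: 2, tags: [ "a", true ] } parses into a dictionary holding a double and an object array:

var tokens = new Queue<(string Rule, string Text)>(new (string Rule, string Text)[]
{
    ("startObject", "{"),
    ("word", "size"), ("colon", ":"), ("word", "2"), ("comma", ","),
    ("word", "tags"), ("colon", ":"),
    ("startArray", "["), ("text", "a"), ("comma", ","), ("word", "true"), ("endArray", "]"),
    ("endObject", "}")
});

var parsed = (Dictionary<string, object>)MiniValueParser.ParseValue(tokens);
// parsed["size"] == 2.0, parsed["tags"] is object[] { "a", true }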