Example #1
        public static ParserResponse Parse(string input)
        {
            var t = new Tokeniser(input);

            // check if all words are ignored and thus unusable
            if (t.WordCount <= t.IgnoreCount)
            {
                return new ParserResponse(t, message: new ResponseMessage("I don't understand that.", ResponseMessageType.Warning));
            }

            // check if the input has no command word
            if (String.IsNullOrEmpty(t.Command.Word))
            {
                return new ParserResponse(t, message: new ResponseMessage("You need to tell me what to do.", ResponseMessageType.Warning));
            }

            // find a Command that matches the command word from the token
            ICommand        command;
            ResponseMessage message;

            try
            {
                command = CommandManager.GetCommand(t.Command.Word);
                message = null;
            }
            catch (CommandNotFoundException cnfe)
            {
                command = null;
                message = new ResponseMessage(cnfe.Message, ResponseMessageType.Error);
            }

            // pass it back
            return new ParserResponse(t, command, message);
        }
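
For context, a minimal sketch of how this entry point might be driven from an input loop. The enclosing Parser class name and the HandleInput wrapper are assumptions; Parse and the ParserResponse properties come from the examples on this page, and calling Process through ICommand assumes the interface exposes the method shown in Example #7.

        // Hypothetical caller: the class name "Parser" and this wrapper method are assumptions;
        // Parse(), ParserResponse.Command/Message/Tokens and Process() are taken from the
        // examples on this page.
        public static void HandleInput(Engine engine, string input)
        {
            ParserResponse response = Parser.Parse(input);

            if (response.Message != null)
            {
                // a warning ("I don't understand that.") or a CommandNotFoundException error;
                // report it to the user and stop here
                return;
            }

            // a command was matched; run it against the engine (see Examples #6 and #7)
            response.Command.Process(engine, response.Tokens);
        }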
Example #2
        public void CorrectTokenStatuses()
        {
            Dictionary <string, object[]> tests = new Dictionary <string, object[]>();

            tests.Add("get the ball north", new object[] { TokenType.Command, TokenType.Ignored, TokenType.Unrecognised, TokenType.Direction });
            tests.Add("go back", new object[] { TokenType.Command, TokenType.Direction });
            tests.Add("ball ball get ball", new object[] { TokenType.Unrecognised, TokenType.Unrecognised, TokenType.Command, TokenType.Unrecognised });

            foreach (KeyValuePair <string, object[]> kvp in tests)
            {
                Tokeniser ts = new Tokeniser(kvp.Key);
                int       ii = 0;
                foreach (Token t in ts)
                {
                    //Console.Out.WriteLine(ii.ToString() + ": " + t.Word + "/" + t.Status.ToString());
                    Assert.That(t.Type, Is.EqualTo(kvp.Value[ii]));
                    ii++;
                }
            }
        }
Example #3
        public void Tokenise()
        {
            Dictionary <string, string[]> tests = new Dictionary <string, string[]>();

            tests.Add("this is a test", new string[] { "this", "is", "a", "test" });
            tests.Add("this, too, is-also a_test", new string[] { "this", "too", "is-also", "a", "test" });

            foreach (KeyValuePair <string, string[]> kvp in tests)
            {
                Tokeniser ts = new Tokeniser(kvp.Key);

                Assert.That(ts.WordCount, Is.EqualTo(kvp.Value.Length));
                int ii = 0;
                foreach (Token t in ts)
                {
                    //Console.Out.WriteLine(ii.ToString() + ": " + t.Word + "/" + t.Status.ToString());
                    Assert.That(t.Word, Is.EqualTo(kvp.Value[ii]));
                    ii++;
                }
                Assert.That(ii, Is.EqualTo(kvp.Value.Length));
            }
        }
Example #4
 /// <summary>
 /// Constructor
 /// </summary>
 /// <param name="tokens">Tokens from the input</param>
 /// <param name="command"></param>
 /// <param name="message"></param>
 public ParserResponse(Tokeniser tokens, ICommand command = null, ResponseMessage message = null)
 {
     Tokens  = tokens;
     Command = command;
     Message = message;
 }
Example #5
 public TokenEnumerator(Tokeniser t)
 {
     this.t   = t;
     position = -1;      // start before the first token, as the IEnumerator pattern expects
 }
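
The -1 follows the usual enumerator convention: the enumerator starts positioned before the first token, and the first MoveNext() call advances it to index 0. A rough sketch of the remaining members such an enumerator typically needs, assuming Tokeniser exposes WordCount (seen in Example #3) and an indexer over its tokens (the indexer is an assumption, not shown on this page):

 // Sketch only: the token indexer on Tokeniser is assumed, not shown in these examples.
 public bool MoveNext()
 {
     position++;                        // move past the "before first" state (-1)
     return position < t.WordCount;     // true while another token remains
 }

 public Token Current
 {
     get { return t[position]; }        // hypothetical indexer on Tokeniser
 }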
Example #6
 /// <summary>
 /// Process the command
 /// </summary>
 /// <param name="engine"></param>
 /// <param name="tokens"></param>
 /// <returns></returns>
 protected abstract Response ProcessInternal(Engine engine, Tokeniser tokens);
Example #7
 /// <summary>
 /// Process the command
 /// </summary>
 /// <param name="engine"></param>
 /// <param name="tokens"></param>
 /// <returns></returns>
 public Response Process(Engine engine, Tokeniser tokens)
 {
     return ProcessInternal(engine, tokens);
 }
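
Examples #6 and #7 form a template-method pair: the public Process wrapper is the fixed entry point, and each concrete command supplies its behaviour by overriding ProcessInternal. A minimal sketch of what a subclass could look like; the base-class name CommandBase and the LookCommand example are assumptions, and only the ProcessInternal signature comes from these examples.

 // Hypothetical concrete command illustrating the Process/ProcessInternal split.
 // "CommandBase" and "LookCommand" are assumed names; only the overridden signature
 // is taken from Examples #6 and #7.
 public class LookCommand : CommandBase
 {
     protected override Response ProcessInternal(Engine engine, Tokeniser tokens)
     {
         // a real command would inspect the tokens, query the engine,
         // and build a Response describing what happened
         return null;    // placeholder only
     }
 }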