{
Expression oe = new Expression ();
oe.Parse (token.Value, ParseOptions.AllowItemsMetadataAndSplit);
- return new Token ((string) oe.ConvertTo (context, typeof (string)), token.Type);
+ return new Token ((string) oe.ConvertTo (context, typeof (string)), token.Type, token.Position);
}
}
}
ConditionExpression e = parser.ParseExpression ();
if (!parser.tokenizer.IsEOF ())
- throw new ExpressionParseException (String.Format ("Unexpected token at end of condition: \"{0}\"", parser.tokenizer.Token.Value));
+ throw new ExpressionParseException (String.Format ("Unexpected token found, {0}, in condition \"{1}\"", parser.tokenizer.Token, condition));
return e;
}
} else if (token.Type == TokenType.Not) {
e = ParseNotExpression ();
} else
- throw new ExpressionParseException (String.Format ("Unexpected token type {0}, while parsing {1}", token.Type, conditionStr));
+ throw new ExpressionParseException (String.Format ("Unexpected token {0}, while parsing condition \"{1}\"", token, conditionStr));
return e;
}
{
StringBuilder sb = new StringBuilder ();
- ExpectToken (TokenType.LeftParen);
+ string ref_type = prefix [0] == '$' ? "a property" : "an item list";
+ int token_pos = tokenizer.Token.Position;
+ // report a descriptive error if the '(' after the '$'/'@' prefix is missing
+ IsAtToken (TokenType.LeftParen, String.Format (
+ 	"Expected {0} at position {1} in condition \"{2}\". Missing opening parenthesis after the '{3}'.",
+ 	ref_type, token_pos, conditionStr, prefix));
tokenizer.GetNextToken ();
sb.AppendFormat ("{0}({1}", prefix, tokenizer.Token.Value);
}
}
- ExpectToken (TokenType.RightParen);
+ // report a descriptive error if the closing ')' is missing
+ IsAtToken (TokenType.RightParen, String.Format (
+ 	"Expected {0} at position {1} in condition \"{2}\". Missing closing parenthesis.",
+ 	ref_type, token_pos, conditionStr));
tokenizer.GetNextToken ();
sb.Append (")");
//FIXME: HACKY!
- return new ConditionFactorExpression (new Token (sb.ToString (), TokenType.String));
+ return new ConditionFactorExpression (new Token (sb.ToString (), TokenType.String, token_pos));
}
- void ExpectToken (TokenType type)
+ // used to check current token type
+ // Throws ExpressionParseException when the current token is not of @type.
+ // A non-empty @error_msg overrides the default messages below.
+ void IsAtToken (TokenType type, string error_msg)
{
- if (tokenizer.Token.Type != type)
- throw new ExpressionParseException ("Expected token type of type: " + type + ", got " +
- tokenizer.Token.Type + " (" + tokenizer.Token.Value + "), while parsing " + conditionStr);
+ if (tokenizer.Token.Type != type) {
+ if (!String.IsNullOrEmpty (error_msg))
+ throw new ExpressionParseException (error_msg);
+
+ // no caller-supplied message: report a premature end of input specially
+ if (tokenizer.Token.Type == TokenType.EOF)
+ throw new ExpressionParseException (String.Format (
+ "Expected a \"{0}\" but the condition ended abruptly, while parsing condition \"{1}\"",
+ Token.TypeAsString (type), conditionStr));
+
+ // generic mismatch: Token.ToString () includes the value and its 1-based position
+ throw new ExpressionParseException (String.Format (
+ "Expected \"{0}\" token, but got {1}, while parsing \"{2}\"",
+ Token.TypeAsString (type), tokenizer.Token, conditionStr));
+ }
}
}
}
this.inputString = s;
this.position = 0;
- this.token = new Token (null, TokenType.BOF);
+ this.token = new Token (null, TokenType.BOF, 0);
GetNextToken ();
}
}
if (token.Type == TokenType.EOF)
- throw new ExpressionParseException ("Cannot read past the end of stream.");
+ throw new ExpressionParseException (String.Format (
+ "Error while parsing condition \"{0}\", ended abruptly.",
+ inputString));
SkipWhiteSpace ();
int i = ReadChar ();
if (i == -1) {
- token = new Token (null, TokenType.EOF);
+ token = new Token (null, TokenType.EOF, tokenPosition);
return;
}
// maybe we should treat item reference as a token
if (ch == '-' && PeekChar () == '>') {
ReadChar ();
- token = new Token ("->", TokenType.Transform);
+ token = new Token ("->", TokenType.Transform, tokenPosition);
} else if (Char.IsDigit (ch) || ch == '-') {
StringBuilder sb = new StringBuilder ();
break;
}
- token = new Token (sb.ToString (), TokenType.Number);
- } else if (ch == '\'') {
+ token = new Token (sb.ToString (), TokenType.Number, tokenPosition);
+ } else if (ch == '\'' && position < inputString.Length) {
StringBuilder sb = new StringBuilder ();
string temp;
temp = sb.ToString ();
- token = new Token (temp.Substring (1, temp.Length - 2), TokenType.String);
+ token = new Token (temp.Substring (1, temp.Length - 2), TokenType.String, tokenPosition);
} else if (ch == '_' || Char.IsLetter (ch)) {
StringBuilder sb = new StringBuilder ();
string temp = sb.ToString ();
if (keywords.ContainsKey (temp))
- token = new Token (temp, keywords [temp]);
+ token = new Token (temp, keywords [temp], tokenPosition);
else
- token = new Token (temp, TokenType.String);
+ token = new Token (temp, TokenType.String, tokenPosition);
} else if (ch == '!' && PeekChar () == (int) '=') {
- token = new Token ("!=", TokenType.NotEqual);
+ token = new Token ("!=", TokenType.NotEqual, tokenPosition);
ReadChar ();
} else if (ch == '<' && PeekChar () == (int) '=') {
- token = new Token ("<=", TokenType.LessOrEqual);
+ token = new Token ("<=", TokenType.LessOrEqual, tokenPosition);
ReadChar ();
} else if (ch == '>' && PeekChar () == (int) '=') {
- token = new Token (">=", TokenType.GreaterOrEqual);
+ token = new Token (">=", TokenType.GreaterOrEqual, tokenPosition);
ReadChar ();
} else if (ch == '=' && PeekChar () == (int) '=') {
- token = new Token ("==", TokenType.Equal);
+ token = new Token ("==", TokenType.Equal, tokenPosition);
ReadChar ();
} else if (ch >= 32 && ch < 128) {
if (charIndexToTokenType [ch] != TokenType.Invalid) {
- token = new Token (new String (ch, 1), charIndexToTokenType [ch]);
+ token = new Token (new String (ch, 1), charIndexToTokenType [ch], tokenPosition);
return;
} else
throw new ExpressionParseException (String.Format ("Invalid punctuation: {0}", ch));
string tokenValue;
TokenType tokenType;
- public Token (string tokenValue, TokenType tokenType)
+ // @position is the 0-based character index of the token in the condition
+ // string; the stored Position is 1-based for human-readable error messages.
+ public Token (string tokenValue, TokenType tokenType, int position)
{
this.tokenValue = tokenValue;
this.tokenType = tokenType;
+ this.Position = position + 1;
}
public string Value {
get { return tokenType; }
}
+ // 1-based character position of this token in the condition string
+ // (set from the 0-based tokenizer index in the constructor)
+ public int Position {
+ get; private set;
+ }
+
+ // Maps a TokenType to the literal text the tokenizer matched for it,
+ // for use in human-readable parse error messages. Falls back to the
+ // enum member name for types with no fixed spelling (Number, String, ...).
+ public static string TypeAsString (TokenType tokenType)
+ {
+ switch (tokenType) {
+ case TokenType.Item: return "@";
+ case TokenType.Property: return "$";
+ case TokenType.Metadata: return "%";
+ case TokenType.Transform: return "->";
+ case TokenType.Less: return "<";
+ case TokenType.Greater: return ">";
+ case TokenType.LessOrEqual: return "<=";
+ case TokenType.GreaterOrEqual: return ">=";
+ // the tokenizer only creates an Equal token for the two-character
+ // "==" (a lone '=' is not a valid condition token), so report "=="
+ case TokenType.Equal: return "==";
+ case TokenType.NotEqual: return "!=";
+ case TokenType.LeftParen: return "(";
+ case TokenType.RightParen: return ")";
+ case TokenType.Dot: return ".";
+ case TokenType.Comma: return ",";
+ case TokenType.Not: return "!";
+ case TokenType.And: return "and";
+ case TokenType.Or: return "or";
+ case TokenType.Apostrophe: return "'";
+ default: return tokenType.ToString ();
+ }
+ }
+
public override string ToString ()
{
- return String.Format ("Token (Type: {0} -> Value: {1})", tokenType, tokenValue);
+ // BOF/EOF carry a null value, so print the type name instead of the value
+ if (tokenType == TokenType.EOF || tokenType == TokenType.BOF)
+ return String.Format ("{0} at character position {1}", tokenType.ToString (), Position);
+
+ return String.Format ("\"{0}\" at character position {1}", tokenValue, Position);
}
}