public class Tokenizer : yyParser.yyInput
{
+ class KeywordEntry
+ {
+ public readonly int Token;
+ public KeywordEntry Next;
+ public readonly char[] Value;
+
+ public KeywordEntry (string value, int token)
+ {
+ this.Value = value.ToCharArray ();
+ this.Token = token;
+ }
+ }
+
SeekableStreamReader reader;
SourceFile ref_name;
CompilationUnit file_name;
int current_token;
bool handle_get_set = false;
bool handle_remove_add = false;
- bool handle_assembly = false;
bool handle_where = false;
bool handle_typeof = false;
bool lambda_arguments_parsing;
- Location current_location;
Location current_comment_location = Location.Null;
- ArrayList escaped_identifiers = new ArrayList ();
+ ArrayList escaped_identifiers;
+ int parsing_generic_less_than;
//
// Used mainly for parser optimizations. Some expressions for instance
// scope only
//
public int parsing_block;
- internal int query_parsing;
+ internal bool query_parsing;
+
+ //
+ // When parsing type only, useful for ambiguous nullable types
+ //
+ public int parsing_type;
+
+ //
+ // Set when parsing generic declaration (type or method header)
+ //
+ public bool parsing_generic_declaration;
+
+ //
+ // The value indicates that we have not reached any declaration or
+ // namespace yet
+ //
+ public int parsing_declaration;
//
// The special character to inject on streams to trigger the EXPRESSION_PARSE
//
bool tokens_seen = false;
+ //
+ // Set to true once the GENERATE_COMPLETION token has been
+ // returned. This helps produce one GENERATE_COMPLETION,
+ // as many COMPLETE_COMPLETION as necessary to complete the
+ // AST tree and one final EOF.
+ //
+ bool generated;
+
//
// Whether a token has been seen on the file
// This is needed because `define' is not allowed to be used
//
bool any_token_seen = false;
- static Hashtable token_values;
static readonly char[] simple_whitespaces = new char[] { ' ', '\t' };
- private static Hashtable TokenValueName
- {
- get {
- if (token_values == null)
- token_values = GetTokenValueNameHash ();
-
- return token_values;
- }
- }
-
- private static Hashtable GetTokenValueNameHash ()
- {
- Type t = typeof (Token);
- FieldInfo [] fields = t.GetFields ();
- Hashtable hash = new Hashtable ();
- foreach (FieldInfo field in fields) {
- if (field.IsLiteral && field.IsStatic && field.FieldType == typeof (int))
- hash.Add (field.GetValue (null), field.Name);
- }
- return hash;
- }
-
- //
- // Returns a verbose representation of the current location
- //
- public string location {
- get {
- string det;
-
- if (current_token == Token.ERROR)
- det = "detail: " + error_details;
- else
- det = "";
-
- // return "Line: "+line+" Col: "+col + "\n" +
- // "VirtLine: "+ref_line +
- // " Token: "+current_token + " " + det;
- string current_token_name = TokenValueName [current_token] as string;
- if (current_token_name == null)
- current_token_name = current_token.ToString ();
-
- return String.Format ("{0} ({1},{2}), Token: {3} {4}", ref_name.Name,
- ref_line,
- col,
- current_token_name,
- det);
- }
- }
-
public bool PropertyParsing {
get { return handle_get_set; }
set { handle_get_set = value; }
- }
-
- public bool AssemblyTargetParsing {
- get { return handle_assembly; }
- set { handle_assembly = value; }
}
public bool EventParsing {
}
}
+ //
+ // This is used to trigger completion generation on the parser
+ public bool CompleteOnEOF;
+
+ void AddEscapedIdentifier (LocatedToken lt)
+ {
+ if (escaped_identifiers == null)
+ escaped_identifiers = new ArrayList ();
+
+ escaped_identifiers.Add (lt);
+ }
+
public bool IsEscapedIdentifier (Location loc)
{
- foreach (LocatedToken lt in escaped_identifiers)
- if (lt.Location.Equals (loc))
- return true;
+ if (escaped_identifiers != null) {
+ foreach (LocatedToken lt in escaped_identifiers)
+ if (lt.Location.Equals (loc))
+ return true;
+ }
+
return false;
}
//
// Class variables
//
- static CharArrayHashtable[] keywords;
+ static KeywordEntry[][] keywords;
static Hashtable keyword_strings;
static NumberStyles styles;
static NumberFormatInfo csharp_format_info;
}
}
- public int Col {
- get {
- return col;
- }
- }
-
//
// This is used when the tokenizer needs to save
// the current position as it needs to do some parsing
static void AddKeyword (string kw, int token)
{
keyword_strings.Add (kw, kw);
- if (keywords [kw.Length] == null) {
- keywords [kw.Length] = new CharArrayHashtable (kw.Length);
+
+ int length = kw.Length;
+ if (keywords [length] == null) {
+ keywords [length] = new KeywordEntry ['z' - '_' + 1];
+ }
+
+ int char_index = kw [0] - '_';
+ KeywordEntry kwe = keywords [length] [char_index];
+ if (kwe == null) {
+ keywords [length] [char_index] = new KeywordEntry (kw, token);
+ return;
+ }
+
+ while (kwe.Next != null) {
+ kwe = kwe.Next;
}
- keywords [kw.Length] [kw.ToCharArray ()] = token;
+
+ kwe.Next = new KeywordEntry (kw, token);
}
static void InitTokens ()
{
keyword_strings = new Hashtable ();
- keywords = new CharArrayHashtable [64];
+
+ // 11 is the length of the longest keyword for now
+ keywords = new KeywordEntry [11] [];
AddKeyword ("__arglist", Token.ARGLIST);
AddKeyword ("abstract", Token.ABSTRACT);
AddKeyword ("as", Token.AS);
AddKeyword ("add", Token.ADD);
- AddKeyword ("assembly", Token.ASSEMBLY);
AddKeyword ("base", Token.BASE);
AddKeyword ("bool", Token.BOOL);
AddKeyword ("break", Token.BREAK);
//
static Tokenizer ()
{
- Reset ();
- }
-
- public static void Reset ()
- {
- InitTokens ();
+ InitTokens ();
csharp_format_info = NumberFormatInfo.InvariantInfo;
styles = NumberStyles.Float;
int GetKeyword (char[] id, int id_len)
{
- /*
- * Keywords are stored in an array of hashtables grouped by their
- * length.
- */
-
- if ((id_len >= keywords.Length) || (keywords [id_len] == null))
+ //
+ // Keywords are stored in an array of arrays grouped by their
+ // length and then by the first character
+ //
+ if (id_len >= keywords.Length || keywords [id_len] == null)
return -1;
- object o = keywords [id_len] [id];
- if (o == null)
+ int first_index = id [0] - '_';
+ if (first_index > 'z')
return -1;
-
- int res = (int) o;
- if (!handle_get_set && (res == Token.GET || res == Token.SET))
+ KeywordEntry kwe = keywords [id_len] [first_index];
+ if (kwe == null)
return -1;
- if (!handle_remove_add && (res == Token.REMOVE || res == Token.ADD))
- return -1;
- if (!handle_assembly && res == Token.ASSEMBLY)
+
+ int res;
+ do {
+ res = kwe.Token;
+ for (int i = 1; i < id_len; ++i) {
+ if (id [i] != kwe.Value [i]) {
+ res = 0;
+ break;
+ }
+ }
+ kwe = kwe.Next;
+ } while (kwe != null && res == 0);
+
+ if (res == 0)
return -1;
-
- //
- // A query expression is any expression that starts with `from identifier'
- // followed by any token except ; , =
- //
- if (query_parsing == 0) {
- if (res == Token.FROM && !lambda_arguments_parsing) {
+
+ int next_token;
+ switch (res) {
+ case Token.GET:
+ case Token.SET:
+ if (!handle_get_set)
+ res = -1;
+ break;
+ case Token.REMOVE:
+ case Token.ADD:
+ if (!handle_remove_add)
+ res = -1;
+ break;
+ case Token.EXTERN:
+ if (parsing_declaration == 0)
+ res = Token.EXTERN_ALIAS;
+ break;
+ case Token.DEFAULT:
+ if (peek_token () == Token.COLON) {
+ token ();
+ res = Token.DEFAULT_COLON;
+ }
+ break;
+ case Token.WHERE:
+ if (!handle_where && !query_parsing)
+ res = -1;
+ break;
+ case Token.FROM:
+ //
+ // A query expression is any expression that starts with `from identifier'
+ // followed by any token except ; , =
+ //
+ if (!query_parsing) {
+ if (lambda_arguments_parsing) {
+ res = -1;
+ break;
+ }
+
PushPosition ();
// HACK: to disable generics micro-parser, because PushPosition does not
// store identifiers array
parsing_generic_less_than = 1;
switch (xtoken ()) {
- case Token.IDENTIFIER:
- case Token.INT:
- case Token.BOOL:
- case Token.BYTE:
- case Token.CHAR:
- case Token.DECIMAL:
- case Token.FLOAT:
- case Token.LONG:
- case Token.OBJECT:
- case Token.STRING:
- case Token.UINT:
- case Token.ULONG:
- int next_token = xtoken ();
- if (next_token == Token.SEMICOLON || next_token == Token.COMMA || next_token == Token.EQUALS)
- goto default;
-
- ++query_parsing;
- if (RootContext.Version <= LanguageVersion.ISO_2)
- Report.FeatureIsNotAvailable (Location, "query expressions");
- break;
- case Token.VOID:
- Expression.Error_VoidInvalidInTheContext (Location);
- break;
- default:
- PopPosition ();
- // HACK: A token is not a keyword so we need to restore identifiers buffer
- // which has been overwritten before we grabbed the identifier
- id_builder [0] = 'f'; id_builder [1] = 'r'; id_builder [2] = 'o'; id_builder [3] = 'm';
- return -1;
+ case Token.IDENTIFIER:
+ case Token.INT:
+ case Token.BOOL:
+ case Token.BYTE:
+ case Token.CHAR:
+ case Token.DECIMAL:
+ case Token.FLOAT:
+ case Token.LONG:
+ case Token.OBJECT:
+ case Token.STRING:
+ case Token.UINT:
+ case Token.ULONG:
+ next_token = xtoken ();
+ if (next_token == Token.SEMICOLON || next_token == Token.COMMA || next_token == Token.EQUALS)
+ goto default;
+
+ res = Token.FROM_FIRST;
+ query_parsing = true;
+ if (RootContext.Version <= LanguageVersion.ISO_2)
+ Report.FeatureIsNotAvailable (Location, "query expressions");
+ break;
+ case Token.VOID:
+ Expression.Error_VoidInvalidInTheContext (Location);
+ break;
+ default:
+ PopPosition ();
+ // HACK: A token is not a keyword so we need to restore identifiers buffer
+ // which has been overwritten before we grabbed the identifier
+ id_builder [0] = 'f'; id_builder [1] = 'r'; id_builder [2] = 'o'; id_builder [3] = 'm';
+ return -1;
}
PopPosition ();
+ }
+ break;
+ case Token.JOIN:
+ case Token.ON:
+ case Token.EQUALS:
+ case Token.SELECT:
+ case Token.GROUP:
+ case Token.BY:
+ case Token.LET:
+ case Token.ORDERBY:
+ case Token.ASCENDING:
+ case Token.DESCENDING:
+ case Token.INTO:
+ if (!query_parsing)
+ res = -1;
+ break;
+
+ case Token.USING:
+ case Token.NAMESPACE:
+ // TODO: some explanation needed
+ check_incorrect_doc_comment ();
+ break;
+
+ case Token.PARTIAL:
+ if (parsing_block > 0) {
+ res = -1;
+ break;
+ }
+
+ // Save current position and parse next token.
+ PushPosition ();
+
+ next_token = token ();
+ bool ok = (next_token == Token.CLASS) ||
+ (next_token == Token.STRUCT) ||
+ (next_token == Token.INTERFACE) ||
+ (next_token == Token.VOID);
+
+ PopPosition ();
+
+ if (ok) {
+ if (next_token == Token.VOID) {
+ if (RootContext.Version == LanguageVersion.ISO_1 ||
+ RootContext.Version == LanguageVersion.ISO_2)
+ Report.FeatureIsNotAvailable (Location, "partial methods");
+ } else if (RootContext.Version == LanguageVersion.ISO_1)
+ Report.FeatureIsNotAvailable (Location, "partial types");
+
return res;
}
- if (res > Token.QUERY_FIRST_TOKEN && res < Token.QUERY_LAST_TOKEN)
- return -1;
+ if (next_token < Token.LAST_KEYWORD) {
+ Report.Error (267, Location,
+ "The `partial' modifier can be used only immediately before `class', `struct', `interface', or `void' keyword");
+ return token ();
+ }
+
+ res = -1;
+ break;
}
- if (res == Token.WHERE && !handle_where && query_parsing == 0)
- return -1;
-
return res;
}
public Location Location {
- get { return current_location; }
+ get {
+ return new Location (ref_line, hidden ? -1 : col);
+ }
}
public Tokenizer (SeekableStreamReader input, CompilationUnit file)
Mono.CSharp.Location.Push (file, file);
}
- static bool is_identifier_start_character (char c)
+ static bool is_identifier_start_character (int c)
{
- return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || Char.IsLetter (c);
+ return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || Char.IsLetter ((char)c);
}
static bool is_identifier_part_character (char c)
{
- return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || (c >= '0' && c <= '9') ||
- Char.IsLetter (c) || Char.GetUnicodeCategory (c) == UnicodeCategory.ConnectorPunctuation;
+ if (c >= 'a' && c <= 'z')
+ return true;
+
+ if (c >= 'A' && c <= 'Z')
+ return true;
+
+ if (c == '_' || (c >= '0' && c <= '9'))
+ return true;
+
+ if (c < 0x80)
+ return false;
+
+ return Char.IsLetter (c) || Char.GetUnicodeCategory (c) == UnicodeCategory.ConnectorPunctuation;
}
public static bool IsKeyword (string s)
}
//
- // Tests whether '(' is beggining of lambda parameters
- //
- bool IsLambdaOpenParens ()
- {
- int ntoken;
- while ((ntoken = xtoken ()) != Token.EOF) {
- switch (ntoken) {
- case Token.CLOSE_PARENS:
- return xtoken () == Token.ARROW;
-
- case Token.STAR:
- case Token.SEMICOLON:
- case Token.OPEN_BRACE:
- case Token.OPEN_PARENS:
- case Token.LITERAL_STRING:
- case Token.LITERAL_INTEGER:
- case Token.LITERAL_FLOAT:
- case Token.LITERAL_DOUBLE:
- case Token.LITERAL_DECIMAL:
- case Token.LITERAL_CHARACTER:
- case Token.NULL:
- case Token.FALSE:
- case Token.TRUE:
- case Token.OP_INC:
- case Token.OP_DEC:
- case Token.OP_SHIFT_LEFT:
- case Token.OP_SHIFT_RIGHT:
- case Token.OP_LE:
- case Token.OP_GE:
- case Token.OP_EQ:
- case Token.OP_NE:
- case Token.OP_AND:
- case Token.OP_OR:
- case Token.BITWISE_AND:
- case Token.BITWISE_OR:
- case Token.PLUS:
- case Token.MINUS:
- case Token.DIV:
- case Token.NEW:
- case Token.THIS:
- case Token.BASE:
- case Token.TYPEOF:
- return false;
+ // Open parens micro parser. Detects both lambda and cast ambiguity.
+ //
+
+ int TokenizeOpenParens ()
+ {
+ int ptoken;
+ current_token = -1;
+
+ int bracket_level = 0;
+ bool is_type = false;
+ bool can_be_type = false;
+
+ while (true) {
+ ptoken = current_token;
+ token ();
+
+ switch (current_token) {
+ case Token.CLOSE_PARENS:
+ token ();
+
+ //
+ // Expression inside parens is lambda, (int i) =>
+ //
+ if (current_token == Token.ARROW) {
+ if (RootContext.Version <= LanguageVersion.ISO_2)
+ Report.FeatureIsNotAvailable (Location, "lambda expressions");
+
+ return Token.OPEN_PARENS_LAMBDA;
+ }
+
+ //
+ // Expression inside parens is single type, (int[])
+ //
+ if (is_type)
+ return Token.OPEN_PARENS_CAST;
+
+ //
+ // Expression is possible cast, look at next token, (T)null
+ //
+ if (can_be_type) {
+ switch (current_token) {
+ case Token.OPEN_PARENS:
+ case Token.BANG:
+ case Token.TILDE:
+ case Token.IDENTIFIER:
+ case Token.LITERAL_INTEGER:
+ case Token.LITERAL_FLOAT:
+ case Token.LITERAL_DOUBLE:
+ case Token.LITERAL_DECIMAL:
+ case Token.LITERAL_CHARACTER:
+ case Token.LITERAL_STRING:
+ case Token.BASE:
+ case Token.CHECKED:
+ case Token.DELEGATE:
+ case Token.FALSE:
+ case Token.FIXED:
+ case Token.NEW:
+ case Token.NULL:
+ case Token.SIZEOF:
+ case Token.THIS:
+ case Token.THROW:
+ case Token.TRUE:
+ case Token.TYPEOF:
+ case Token.UNCHECKED:
+ case Token.UNSAFE:
+ case Token.DEFAULT:
+
+ //
+ // These can be part of a member access
+ //
+ case Token.INT:
+ case Token.UINT:
+ case Token.SHORT:
+ case Token.USHORT:
+ case Token.LONG:
+ case Token.ULONG:
+ case Token.DOUBLE:
+ case Token.FLOAT:
+ case Token.CHAR:
+ case Token.BYTE:
+ case Token.DECIMAL:
+ case Token.BOOL:
+ return Token.OPEN_PARENS_CAST;
+ }
+ }
+ return Token.OPEN_PARENS;
+
+ case Token.DOT:
+ case Token.DOUBLE_COLON:
+ if (ptoken != Token.IDENTIFIER && ptoken != Token.OP_GENERICS_GT)
+ goto default;
+
+ continue;
+
+ case Token.IDENTIFIER:
+ switch (ptoken) {
+ case Token.DOT:
+ case Token.OP_GENERICS_LT:
+ case Token.COMMA:
+ case Token.DOUBLE_COLON:
+ case -1:
+ if (bracket_level == 0)
+ can_be_type = true;
+ continue;
+ default:
+ can_be_type = is_type = false;
+ continue;
+ }
+
+ case Token.OBJECT:
+ case Token.STRING:
+ case Token.BOOL:
+ case Token.DECIMAL:
+ case Token.FLOAT:
+ case Token.DOUBLE:
+ case Token.SBYTE:
+ case Token.BYTE:
+ case Token.SHORT:
+ case Token.USHORT:
+ case Token.INT:
+ case Token.UINT:
+ case Token.LONG:
+ case Token.ULONG:
+ case Token.CHAR:
+ case Token.VOID:
+ if (bracket_level == 0)
+ is_type = true;
+ continue;
+
+ case Token.COMMA:
+ if (bracket_level == 0) {
+ bracket_level = 100;
+ can_be_type = is_type = false;
+ }
+ continue;
+
+ case Token.OP_GENERICS_LT:
+ case Token.OPEN_BRACKET:
+ if (bracket_level++ == 0)
+ is_type = true;
+ continue;
+
+ case Token.OP_GENERICS_GT:
+ case Token.CLOSE_BRACKET:
+ --bracket_level;
+ continue;
+
+ case Token.INTERR_NULLABLE:
+ case Token.STAR:
+ if (bracket_level == 0)
+ is_type = true;
+ continue;
+
+ case Token.REF:
+ case Token.OUT:
+ can_be_type = is_type = false;
+ continue;
+
+ default:
+ return Token.OPEN_PARENS;
}
}
-
- Error_TokenExpected (",' or `)");
- return false;
}
public static bool IsValidIdentifier (string s)
the_token = token ();
} while (the_token != Token.CLOSE_BRACKET);
the_token = token ();
+ } else if (the_token == Token.IN || the_token == Token.OUT) {
+ the_token = token ();
}
switch (the_token) {
case Token.IDENTIFIER:
case Token.CHAR:
case Token.VOID:
break;
-
case Token.OP_GENERICS_GT:
return true;
return the_token;
}
- int parsing_generic_less_than = 0;
-
- int is_punct (char c, ref bool doread)
+ //
+ // Tokenizes `?' using custom disambiguation rules to return one
+ // of following tokens: INTERR_NULLABLE, OP_COALESCING, INTERR
+ //
+ // Tricky expression look like:
+ //
+ // Foo ? a = x ? b : c;
+ //
+ int TokenizePossibleNullableType ()
{
- int d;
- int t;
-
- doread = false;
-
- switch (c){
- case '{':
- val = Location;
- return Token.OPEN_BRACE;
- case '}':
- val = Location;
- return Token.CLOSE_BRACE;
- case '[':
- // To block doccomment inside attribute declaration.
- if (doc_state == XmlCommentState.Allowed)
- doc_state = XmlCommentState.NotAllowed;
- return Token.OPEN_BRACKET;
- case ']':
- return Token.CLOSE_BRACKET;
- case '(':
- //
- // A lambda expression can appear in block context only
- //
- if (parsing_block != 0 && !lambda_arguments_parsing) {
- lambda_arguments_parsing = true;
- PushPosition ();
- bool lambda_start = IsLambdaOpenParens ();
- PopPosition ();
- lambda_arguments_parsing = false;
- if (lambda_start) {
- if (RootContext.Version <= LanguageVersion.ISO_2)
- Report.FeatureIsNotAvailable (Location, "lambda expressions");
-
- return Token.OPEN_PARENS_LAMBDA;
- }
- }
- return Token.OPEN_PARENS;
- case ')': {
- if (deambiguate_close_parens == 0)
- return Token.CLOSE_PARENS;
-
- --deambiguate_close_parens;
-
- PushPosition ();
+ if (parsing_block == 0 || parsing_type > 0)
+ return Token.INTERR_NULLABLE;
- int new_token = xtoken ();
-
- PopPosition ();
-
- if (new_token == Token.OPEN_PARENS)
- return Token.CLOSE_PARENS_OPEN_PARENS;
- else if (new_token == Token.MINUS)
- return Token.CLOSE_PARENS_MINUS;
- else if (IsCastToken (new_token))
- return Token.CLOSE_PARENS_CAST;
- else
- return Token.CLOSE_PARENS_NO_CAST;
- }
-
- case ',':
- return Token.COMMA;
- case ';':
- val = Location;
- return Token.SEMICOLON;
- case '~':
- val = Location;
- return Token.TILDE;
- case '?':
- return TokenizePossibleNullableType ();
- }
-
- if (c == '<') {
- if (parsing_generic_less_than++ > 0)
- return Token.OP_GENERICS_LT;
-
- if (handle_typeof) {
- int dimension;
- PushPosition ();
- if (parse_generic_dimension (out dimension)) {
- val = dimension;
- DiscardPosition ();
- return Token.GENERIC_DIMENSION;
- }
- PopPosition ();
- }
-
- // Save current position and parse next token.
- PushPosition ();
- bool is_generic_lt = parse_less_than ();
- PopPosition ();
-
- if (is_generic_lt) {
- return Token.OP_GENERICS_LT;
- } else
- parsing_generic_less_than = 0;
-
- d = peek_char ();
- if (d == '<'){
- get_char ();
- d = peek_char ();
-
- if (d == '='){
- doread = true;
- return Token.OP_SHIFT_LEFT_ASSIGN;
- }
- return Token.OP_SHIFT_LEFT;
- } else if (d == '='){
- doread = true;
- return Token.OP_LE;
- }
- return Token.OP_LT;
- } else if (c == '>') {
- d = peek_char ();
-
- if (d == '='){
- doread = true;
- return Token.OP_GE;
- }
-
- if (parsing_generic_less_than > 1 || (parsing_generic_less_than == 1 && d != '>')) {
- parsing_generic_less_than--;
- return Token.OP_GENERICS_GT;
- }
-
- if (d == '>') {
- get_char ();
- d = peek_char ();
-
- if (d == '=') {
- doread = true;
- return Token.OP_SHIFT_RIGHT_ASSIGN;
- }
- return Token.OP_SHIFT_RIGHT;
- }
-
- return Token.OP_GT;
- }
-
- d = peek_char ();
- if (c == '+'){
-
- if (d == '+') {
- val = Location;
- t = Token.OP_INC;
- }
- else if (d == '=')
- t = Token.OP_ADD_ASSIGN;
- else {
- val = Location;
- return Token.PLUS;
- }
- doread = true;
- return t;
- }
- if (c == '-'){
- if (d == '-') {
- val = Location;
- t = Token.OP_DEC;
- }
- else if (d == '=')
- t = Token.OP_SUB_ASSIGN;
- else if (d == '>')
- t = Token.OP_PTR;
- else {
- val = Location;
- return Token.MINUS;
- }
- doread = true;
- return t;
- }
-
- if (c == '!'){
- if (d == '='){
- doread = true;
- return Token.OP_NE;
- }
- val = Location;
- return Token.BANG;
- }
-
- if (c == '='){
- if (d == '='){
- doread = true;
- return Token.OP_EQ;
- }
- if (d == '>'){
- doread = true;
- val = Location;
- return Token.ARROW;
- }
-
- return Token.ASSIGN;
- }
-
- if (c == '&'){
- if (d == '&'){
- doread = true;
- return Token.OP_AND;
- } else if (d == '='){
- doread = true;
- return Token.OP_AND_ASSIGN;
- }
- val = Location;
- return Token.BITWISE_AND;
- }
-
- if (c == '|'){
- if (d == '|'){
- doread = true;
- return Token.OP_OR;
- } else if (d == '='){
- doread = true;
- return Token.OP_OR_ASSIGN;
- }
- return Token.BITWISE_OR;
- }
-
- if (c == '*'){
- if (d == '='){
- doread = true;
- return Token.OP_MULT_ASSIGN;
- }
- val = Location;
- return Token.STAR;
- }
-
- if (c == '/'){
- if (d == '='){
- doread = true;
- return Token.OP_DIV_ASSIGN;
- }
- return Token.DIV;
- }
-
- if (c == '%'){
- if (d == '='){
- doread = true;
- return Token.OP_MOD_ASSIGN;
- }
- return Token.PERCENT;
- }
-
- if (c == '^'){
- if (d == '='){
- doread = true;
- return Token.OP_XOR_ASSIGN;
- }
- return Token.CARRET;
- }
-
- if (c == ':'){
- if (d == ':'){
- doread = true;
- return Token.DOUBLE_COLON;
- }
- val = Location;
- return Token.COLON;
- }
-
- return Token.ERROR;
- }
-
- //
- // Tonizes `?' using custom disambiguous rules to return one
- // of following tokens: INTERR_NULLABLE, OP_COALESCING, INTERR
- //
- // Tricky expression look like:
- //
- // Foo ? a = x ? b : c;
- //
- int TokenizePossibleNullableType ()
- {
- if (parsing_block == 0)
- return Token.INTERR_NULLABLE;
-
- int d = peek_char ();
- if (d == '?') {
- get_char ();
- return Token.OP_COALESCING;
- }
+ int d = peek_char ();
+ if (d == '?') {
+ get_char ();
+ return Token.OP_COALESCING;
+ }
switch (current_token) {
- case Token.CLOSE_PARENS:
- case Token.TRUE:
- case Token.FALSE:
- case Token.NULL:
- case Token.LITERAL_INTEGER:
- case Token.LITERAL_STRING:
- return Token.INTERR;
+ case Token.CLOSE_PARENS:
+ case Token.TRUE:
+ case Token.FALSE:
+ case Token.NULL:
+ case Token.LITERAL_INTEGER:
+ case Token.LITERAL_STRING:
+ return Token.INTERR;
}
if (d != ' ') {
}
PushPosition ();
+ current_token = Token.NONE;
int next_token;
switch (xtoken ()) {
- case Token.LITERAL_INTEGER:
- case Token.LITERAL_STRING:
- case Token.LITERAL_CHARACTER:
- case Token.LITERAL_DECIMAL:
- case Token.LITERAL_DOUBLE:
- case Token.LITERAL_FLOAT:
- case Token.TRUE:
- case Token.FALSE:
- case Token.NULL:
- case Token.THIS:
- next_token = Token.INTERR;
- break;
+ case Token.LITERAL_INTEGER:
+ case Token.LITERAL_STRING:
+ case Token.LITERAL_CHARACTER:
+ case Token.LITERAL_DECIMAL:
+ case Token.LITERAL_DOUBLE:
+ case Token.LITERAL_FLOAT:
+ case Token.TRUE:
+ case Token.FALSE:
+ case Token.NULL:
+ case Token.THIS:
+ case Token.NEW:
+ next_token = Token.INTERR;
+ break;
+
+ case Token.SEMICOLON:
+ case Token.COMMA:
+ case Token.CLOSE_PARENS:
+ case Token.OPEN_BRACKET:
+ case Token.OP_GENERICS_GT:
+ next_token = Token.INTERR_NULLABLE;
+ break;
+
+ default:
+ next_token = -1;
+ break;
+ }
- case Token.SEMICOLON:
+ if (next_token == -1) {
+ switch (xtoken ()) {
case Token.COMMA:
+ case Token.SEMICOLON:
+ case Token.OPEN_BRACE:
case Token.CLOSE_PARENS:
- case Token.OPEN_BRACKET:
- case Token.OP_GENERICS_GT:
+ case Token.IN:
next_token = Token.INTERR_NULLABLE;
break;
-
+
+ case Token.COLON:
+ next_token = Token.INTERR;
+ break;
+
default:
- next_token = -1;
- break;
- }
-
- if (next_token == -1) {
- switch (xtoken ()) {
- case Token.COMMA:
- case Token.SEMICOLON:
- case Token.OPEN_BRACE:
- case Token.CLOSE_PARENS:
- case Token.IN:
- next_token = Token.INTERR_NULLABLE;
- break;
-
- case Token.COLON:
- next_token = Token.INTERR;
- break;
-
- default:
- int ntoken;
- int interrs = 1;
- int colons = 0;
- //
- // All shorcuts failed, do it hard way
- //
- while ((ntoken = xtoken ()) != Token.EOF) {
+ int ntoken;
+ int interrs = 1;
+ int colons = 0;
+ //
+ // All shortcuts failed, do it the hard way
+ //
+ while ((ntoken = xtoken ()) != Token.EOF) {
if (ntoken == Token.SEMICOLON)
+ break;
+
+ if (ntoken == Token.COLON) {
+ if (++colons == interrs)
break;
-
- if (ntoken == Token.COLON) {
- if (++colons == interrs)
- break;
- continue;
- }
-
- if (ntoken == Token.INTERR) {
- ++interrs;
- continue;
- }
+ continue;
}
-
- next_token = colons != interrs ? Token.INTERR_NULLABLE : Token.INTERR;
- break;
+
+ if (ntoken == Token.INTERR) {
+ ++interrs;
+ continue;
+ }
+ }
+
+ next_token = colons != interrs ? Token.INTERR_NULLABLE : Token.INTERR;
+ break;
}
}
return next_token;
}
- int deambiguate_close_parens = 0;
-
- public void Deambiguate_CloseParens (object expression)
- {
- putback (')');
-
- // When any binary operation, a conditional is used we are sure it is not a cast
- // maybe more.
-
- if (expression is Binary || expression is Conditional)
- return;
-
- deambiguate_close_parens++;
- }
-
bool decimal_digits (int c)
{
int d;
int peek_char ()
{
- if (putback_char != -1)
- return putback_char;
- putback_char = reader.Read ();
+ if (putback_char == -1)
+ putback_char = reader.Read ();
return putback_char;
}
public bool advance ()
{
- return peek_char () != -1;
+ return peek_char () != -1 || CompleteOnEOF;
}
public Object Value {
return val;
}
- static bool IsCastToken (int token)
- {
- switch (token) {
- case Token.BANG:
- case Token.TILDE:
- case Token.IDENTIFIER:
- case Token.LITERAL_INTEGER:
- case Token.LITERAL_FLOAT:
- case Token.LITERAL_DOUBLE:
- case Token.LITERAL_DECIMAL:
- case Token.LITERAL_CHARACTER:
- case Token.LITERAL_STRING:
- case Token.BASE:
- case Token.CHECKED:
- case Token.DELEGATE:
- case Token.FALSE:
- case Token.FIXED:
- case Token.NEW:
- case Token.NULL:
- case Token.SIZEOF:
- case Token.THIS:
- case Token.THROW:
- case Token.TRUE:
- case Token.TYPEOF:
- case Token.UNCHECKED:
- case Token.UNSAFE:
- case Token.DEFAULT:
-
- //
- // These can be part of a member access
- //
- case Token.INT:
- case Token.UINT:
- case Token.SHORT:
- case Token.USHORT:
- case Token.LONG:
- case Token.ULONG:
- case Token.DOUBLE:
- case Token.FLOAT:
- case Token.CHAR:
- case Token.BYTE:
- case Token.DECIMAL:
- return true;
-
- default:
- return false;
- }
- }
-
public int token ()
{
current_token = xtoken ();
-
- if (current_token != Token.DEFAULT)
- return current_token;
-
- PushPosition();
- int c = xtoken();
- if (c == -1)
- current_token = Token.ERROR;
- else if (c == Token.OPEN_PARENS)
- current_token = Token.DEFAULT_OPEN_PARENS;
- else if (c == Token.COLON)
- current_token = Token.DEFAULT_COLON;
- else
- PopPosition();
-
return current_token;
}
c = get_char ();
static_cmd_arg.Length = 0;
+ int has_identifier_argument = 0;
+
while (c != -1 && c != '\n' && c != '\r') {
- if (c == '\\') {
- int peek = peek_char ();
- if (peek == 'U' || peek == 'u') {
- int surrogate;
- c = EscapeUnicode (c, out surrogate);
- if (surrogate != 0) {
- if (is_identifier_part_character ((char) c))
- static_cmd_arg.Append ((char) c);
- c = surrogate;
+ if (c == '\\' && has_identifier_argument >= 0) {
+ if (has_identifier_argument != 0 || (cmd == "define" || cmd == "if" || cmd == "elif" || cmd == "undef")) {
+ has_identifier_argument = 1;
+
+ int peek = peek_char ();
+ if (peek == 'U' || peek == 'u') {
+ int surrogate;
+ c = EscapeUnicode (c, out surrogate);
+ if (surrogate != 0) {
+ if (is_identifier_part_character ((char) c))
+ static_cmd_arg.Append ((char) c);
+ c = surrogate;
+ }
}
+ } else {
+ has_identifier_argument = -1;
}
}
static_cmd_arg.Append ((char) c);
void PreProcessDefinition (bool is_define, string ident, bool caller_is_taking)
{
if (ident.Length == 0 || ident == "true" || ident == "false"){
- Report.Error (1001, Location, "Missing identifer to pre-processor directive");
+ Report.Error (1001, Location, "Missing identifier to pre-processor directive");
return;
}
"Unexpected processor directive ({0})", extra);
}
- void Error_TokenExpected (string token)
- {
- Report.Error (1026, Location, "Expected `{0}'", token);
- }
-
void Error_TokensSeen ()
{
Report.Error (1032, Location,
return ret;
}
- case "define":
- if (any_token_seen){
- Error_TokensSeen ();
- return caller_is_taking;
- }
- PreProcessDefinition (true, arg, caller_is_taking);
+ case "define":
+ if (any_token_seen){
+ Error_TokensSeen ();
return caller_is_taking;
+ }
+ PreProcessDefinition (true, arg, caller_is_taking);
+ return caller_is_taking;
- case "undef":
- if (any_token_seen){
- Error_TokensSeen ();
- return caller_is_taking;
- }
- PreProcessDefinition (false, arg, caller_is_taking);
+ case "undef":
+ if (any_token_seen){
+ Error_TokensSeen ();
return caller_is_taking;
+ }
+ PreProcessDefinition (false, arg, caller_is_taking);
+ return caller_is_taking;
}
//
int c;
string_builder.Length = 0;
- //
- // No need to parse full string when parsing lambda arguments
- //
- if (lambda_arguments_parsing)
- return Token.LITERAL_STRING;
-
while ((c = get_char ()) != -1){
if (c == '"'){
if (quoted && peek_char () == '"'){
if (doc_state == XmlCommentState.Allowed)
doc_state = XmlCommentState.NotAllowed;
- switch (res) {
- case Token.USING:
- case Token.NAMESPACE:
- check_incorrect_doc_comment ();
- break;
- }
-
- if (res == Token.PARTIAL) {
- if (parsing_block > 0) {
- val = new LocatedToken (Location, "partial");
- return Token.IDENTIFIER;
- }
-
- // Save current position and parse next token.
- PushPosition ();
-
- int next_token = token ();
- bool ok = (next_token == Token.CLASS) ||
- (next_token == Token.STRUCT) ||
- (next_token == Token.INTERFACE) ||
- (next_token == Token.VOID);
-
- PopPosition ();
-
- if (ok) {
- if (next_token == Token.VOID) {
- if (RootContext.Version == LanguageVersion.ISO_1 ||
- RootContext.Version == LanguageVersion.ISO_2)
- Report.FeatureIsNotAvailable (Location, "partial methods");
- } else if (RootContext.Version == LanguageVersion.ISO_1)
- Report.FeatureIsNotAvailable (Location, "partial types");
-
- return res;
- }
-
- if (next_token < Token.LAST_KEYWORD) {
- Report.Error (267, Location,
- "The `partial' modifier can be used only immediately before `class', `struct', `interface', or `void' keyword");
- return token ();
- }
-
- val = new LocatedToken (Location, "partial");
- return Token.IDENTIFIER;
- }
return res;
}
}
id_builder [pos++] = (char) c;
- current_location = new Location (ref_line, hidden ? -1 : Col);
+ Location loc = Location;
while ((c = get_char ()) != -1) {
loop:
if (is_identifier_part_character ((char) c)){
if (pos == max_id_size){
- Report.Error (645, Location, "Identifier too long (limit is 512 chars)");
+ Report.Error (645, loc, "Identifier too long (limit is 512 chars)");
return Token.ERROR;
}
id_builder [pos++] = (char) c;
-// putback_char = -1;
} else if (c == '\\') {
int surrogate;
c = escape (c, out surrogate);
}
goto loop;
} else {
-// putback_char = c;
putback (c);
break;
}
if (id_builder [0] >= '_' && !quoted) {
int keyword = GetKeyword (id_builder, pos);
if (keyword != -1) {
- val = Location;
+ // TODO: No need to store location for keyword, required location cleanup
+ val = loc;
return keyword;
}
}
// Keep identifiers in an array of hashtables to avoid needless
// allocations
//
-
- if (identifiers [pos] != null) {
- val = identifiers [pos][id_builder];
+ CharArrayHashtable identifiers_group = identifiers [pos];
+ if (identifiers_group != null) {
+ val = identifiers_group [id_builder];
if (val != null) {
- val = new LocatedToken (Location, (string) val);
+ val = new LocatedToken (loc, (string) val);
if (quoted)
- escaped_identifiers.Add (val);
+ AddEscapedIdentifier ((LocatedToken) val);
return Token.IDENTIFIER;
}
+ } else {
+ identifiers_group = new CharArrayHashtable (pos);
+ identifiers [pos] = identifiers_group;
}
- else
- identifiers [pos] = new CharArrayHashtable (pos);
+
+ char [] chars = new char [pos];
+ Array.Copy (id_builder, chars, pos);
val = new String (id_builder, 0, pos);
+ identifiers_group.Add (chars, val);
+
if (RootContext.Version == LanguageVersion.ISO_1) {
- for (int i = 1; i < id_builder.Length; i += 3) {
- if (id_builder [i] == '_' && (id_builder [i - 1] == '_' || id_builder [i + 1] == '_')) {
- Report.Error (1638, Location,
+ for (int i = 1; i < chars.Length; i += 3) {
+ if (chars [i] == '_' && (chars [i - 1] == '_' || chars [i + 1] == '_')) {
+ Report.Error (1638, loc,
"`{0}': Any identifier with double underscores cannot be used when ISO language version mode is specified", val.ToString ());
- break;
}
}
}
- char [] chars = new char [pos];
- Array.Copy (id_builder, chars, pos);
-
- identifiers [pos] [chars] = val;
-
- val = new LocatedToken (Location, (string) val);
+ val = new LocatedToken (loc, (string) val);
if (quoted)
- escaped_identifiers.Add (val);
+ AddEscapedIdentifier ((LocatedToken) val);
return Token.IDENTIFIER;
}
public int xtoken ()
{
- int t;
- bool doread = false;
- int c;
+ int d, c;
// Whether we have seen comments on the current line
bool comments_seen = false;
- val = null;
- for (;(c = get_char ()) != -1;) {
- if (c == '\t'){
+ while ((c = get_char ()) != -1) {
+ switch (c) {
+ case '\t':
col = ((col + 8) / 8) * 8;
continue;
- }
-
- if (c == ' ' || c == '\f' || c == '\v' || c == 0xa0 || c == 0)
+
+ case ' ':
+ case '\f':
+ case '\v':
+ case 0xa0:
+ case 0:
+ case 0xFEFF: // Ignore BOM anywhere in the file
continue;
- if (c == '\r') {
+/* This is required for compatibility with .NET
+ case 0xEF:
+ if (peek_char () == 0xBB) {
+ PushPosition ();
+ get_char ();
+ if (get_char () == 0xBF)
+ continue;
+ PopPosition ();
+ }
+ break;
+*/
+ case '\r':
if (peek_char () != '\n')
advance_line ();
else
tokens_seen = false;
comments_seen = false;
continue;
- }
- // Handle double-slash comments.
- if (c == '/'){
- int d = peek_char ();
-
+ case '\\':
+ tokens_seen = true;
+ return consume_identifier (c);
+
+ case '{':
+ val = Location;
+ return Token.OPEN_BRACE;
+ case '}':
+ val = Location;
+ return Token.CLOSE_BRACE;
+ case '[':
+ // To block doccomment inside attribute declaration.
+ if (doc_state == XmlCommentState.Allowed)
+ doc_state = XmlCommentState.NotAllowed;
+ return Token.OPEN_BRACKET;
+ case ']':
+ return Token.CLOSE_BRACKET;
+ case '(':
+ val = Location;
+ //
+				// An expression version of parens can appear in block context only
+ //
+ if (parsing_block != 0 && !lambda_arguments_parsing) {
+
+ //
+				// Optimize most common case where we know that parens
+ // is not special
+ //
+ switch (current_token) {
+ case Token.IDENTIFIER:
+ case Token.IF:
+ case Token.FOR:
+ case Token.FOREACH:
+ case Token.TYPEOF:
+ case Token.WHILE:
+ case Token.USING:
+ case Token.DEFAULT:
+ case Token.DELEGATE:
+ case Token.OP_GENERICS_GT:
+ return Token.OPEN_PARENS;
+ }
+
+ // Optimize using peek
+ int xx = peek_char ();
+ switch (xx) {
+ case '(':
+ case '\'':
+ case '"':
+ case '0':
+ case '1':
+ return Token.OPEN_PARENS;
+ }
+
+ lambda_arguments_parsing = true;
+ PushPosition ();
+ d = TokenizeOpenParens ();
+ PopPosition ();
+ lambda_arguments_parsing = false;
+ return d;
+ }
+
+ return Token.OPEN_PARENS;
+ case ')':
+ return Token.CLOSE_PARENS;
+ case ',':
+ return Token.COMMA;
+ case ';':
+ return Token.SEMICOLON;
+ case '~':
+ return Token.TILDE;
+ case '?':
+ return TokenizePossibleNullableType ();
+ case '<':
+ if (parsing_generic_less_than++ > 0)
+ return Token.OP_GENERICS_LT;
+
+ return TokenizeLessThan ();
+
+ case '>':
+ d = peek_char ();
+
+ if (d == '='){
+ get_char ();
+ return Token.OP_GE;
+ }
+
+ if (parsing_generic_less_than > 1 || (parsing_generic_less_than == 1 && d != '>')) {
+ parsing_generic_less_than--;
+ return Token.OP_GENERICS_GT;
+ }
+
+ if (d == '>') {
+ get_char ();
+ d = peek_char ();
+
+ if (d == '=') {
+ get_char ();
+ return Token.OP_SHIFT_RIGHT_ASSIGN;
+ }
+ return Token.OP_SHIFT_RIGHT;
+ }
+
+ return Token.OP_GT;
+
+ case '+':
+ d = peek_char ();
+ if (d == '+') {
+ d = Token.OP_INC;
+ } else if (d == '=') {
+ d = Token.OP_ADD_ASSIGN;
+ } else {
+ return Token.PLUS;
+ }
+ get_char ();
+ return d;
+
+ case '-':
+ d = peek_char ();
+ if (d == '-') {
+ d = Token.OP_DEC;
+ } else if (d == '=')
+ d = Token.OP_SUB_ASSIGN;
+ else if (d == '>')
+ d = Token.OP_PTR;
+ else {
+ return Token.MINUS;
+ }
+ get_char ();
+ return d;
+
+ case '!':
+ if (peek_char () == '='){
+ get_char ();
+ return Token.OP_NE;
+ }
+ return Token.BANG;
+
+ case '=':
+ d = peek_char ();
+ if (d == '='){
+ get_char ();
+ return Token.OP_EQ;
+ }
+ if (d == '>'){
+ get_char ();
+ return Token.ARROW;
+ }
+
+ return Token.ASSIGN;
+
+ case '&':
+ d = peek_char ();
+ if (d == '&'){
+ get_char ();
+ return Token.OP_AND;
+ }
+ if (d == '='){
+ get_char ();
+ return Token.OP_AND_ASSIGN;
+ }
+ return Token.BITWISE_AND;
+
+ case '|':
+ d = peek_char ();
+ if (d == '|'){
+ get_char ();
+ return Token.OP_OR;
+ }
+ if (d == '='){
+ get_char ();
+ return Token.OP_OR_ASSIGN;
+ }
+ return Token.BITWISE_OR;
+
+ case '*':
+ if (peek_char () == '='){
+ get_char ();
+ return Token.OP_MULT_ASSIGN;
+ }
+ val = Location;
+ return Token.STAR;
+
+ case '/':
+ d = peek_char ();
+ if (d == '='){
+ get_char ();
+ return Token.OP_DIV_ASSIGN;
+ }
+
+ // Handle double-slash comments.
if (d == '/'){
get_char ();
if (RootContext.Documentation != null && peek_char () == '/') {
xml_comment_buffer.Append (Environment.NewLine);
}
- Location start_location = Location;
-
while ((d = get_char ()) != -1){
if (d == '*' && peek_char () == '/'){
get_char ();
}
}
if (!comments_seen)
- Report.Error (1035, start_location, "End-of-file found, '*/' expected");
+ Report.Error (1035, Location, "End-of-file found, '*/' expected");
if (docAppend)
update_formatted_doc_comment (current_comment_start);
continue;
}
- goto is_punct_label;
- }
+ return Token.DIV;
-
- if (c == '\\' || is_identifier_start_character ((char)c)){
- tokens_seen = true;
- return consume_identifier (c);
- }
+ case '%':
+ if (peek_char () == '='){
+ get_char ();
+ return Token.OP_MOD_ASSIGN;
+ }
+ return Token.PERCENT;
- is_punct_label:
- current_location = new Location (ref_line, hidden ? -1 : Col);
- if ((t = is_punct ((char)c, ref doread)) != Token.ERROR){
- tokens_seen = true;
- if (doread){
+ case '^':
+ if (peek_char () == '='){
get_char ();
+ return Token.OP_XOR_ASSIGN;
}
- return t;
- }
+ return Token.CARRET;
- // white space
- if (c == '\n'){
+ case ':':
+ if (peek_char () == ':') {
+ get_char ();
+ return Token.DOUBLE_COLON;
+ }
+ return Token.COLON;
+
+ case '0': case '1': case '2': case '3': case '4':
+ case '5': case '6': case '7': case '8': case '9':
+ tokens_seen = true;
+ return is_number (c);
+
+ case '\n': // white space
any_token_seen |= tokens_seen;
tokens_seen = false;
comments_seen = false;
continue;
- }
-
- if (c >= '0' && c <= '9'){
- tokens_seen = true;
- return is_number (c);
- }
- if (c == '.'){
+ case '.':
tokens_seen = true;
- int peek = peek_char ();
- if (peek >= '0' && peek <= '9')
+ d = peek_char ();
+ if (d >= '0' && d <= '9')
return is_number (c);
return Token.DOT;
- }
- if (c == '#') {
+ case '#':
if (tokens_seen || comments_seen) {
Eror_WrongPreprocessorLocation ();
return Token.ERROR;
}
return Token.EOF;
- }
- if (c == '"')
+ case '"':
return consume_string (false);
- if (c == '\''){
- c = get_char ();
- tokens_seen = true;
- if (c == '\''){
- error_details = "Empty character literal";
- Report.Error (1011, Location, error_details);
- return Token.ERROR;
- }
- if (c == '\r' || c == '\n') {
- Report.Error (1010, Location, "Newline in constant");
- return Token.ERROR;
- }
-
- int surrogate;
- c = escape (c, out surrogate);
- if (c == -1)
- return Token.ERROR;
- if (surrogate != 0)
- throw new NotImplementedException ();
-
- val = (char) c;
- c = get_char ();
-
- if (c != '\''){
- error_details = "Too many characters in character literal";
- Report.Error (1012, Location, error_details);
-
- // Try to recover, read until newline or next "'"
- while ((c = get_char ()) != -1){
- if (c == '\n'){
- break;
- }
- else if (c == '\'')
- break;
- }
- return Token.ERROR;
- }
- return Token.LITERAL_CHARACTER;
- }
+ case '\'':
+ return TokenizeBackslash ();
- if (c == '@') {
+ case '@':
c = get_char ();
if (c == '"') {
tokens_seen = true;
return consume_string (true);
- } else if (is_identifier_start_character ((char) c)){
+ }
+
+ if (is_identifier_start_character (c)){
return consume_identifier (c, true);
- } else {
- Report.Error (1646, Location, "Keyword, identifier, or string expected after verbatim specifier: @");
}
- }
- if (c == EvalStatementParserCharacter)
+ Report.Error (1646, Location, "Keyword, identifier, or string expected after verbatim specifier: @");
+ return Token.ERROR;
+
+ case EvalStatementParserCharacter:
return Token.EVAL_STATEMENT_PARSER;
- if (c == EvalCompilationUnitParserCharacter)
+ case EvalCompilationUnitParserCharacter:
return Token.EVAL_COMPILATION_UNIT_PARSER;
- if (c == EvalUsingDeclarationsParserCharacter)
+ case EvalUsingDeclarationsParserCharacter:
return Token.EVAL_USING_DECLARATIONS_UNIT_PARSER;
-
+ }
+
+ if (is_identifier_start_character (c)) {
+ tokens_seen = true;
+ return consume_identifier (c);
+ }
+
error_details = ((char)c).ToString ();
-
return Token.ERROR;
}
+ if (CompleteOnEOF){
+ if (generated)
+ return Token.COMPLETE_COMPLETION;
+
+ generated = true;
+ return Token.GENERATE_COMPLETION;
+ }
+
+
return Token.EOF;
}
+ int TokenizeBackslash ()
+ {
+ int c = get_char ();
+ tokens_seen = true;
+ if (c == '\'') {
+ error_details = "Empty character literal";
+ Report.Error (1011, Location, error_details);
+ return Token.ERROR;
+ }
+ if (c == '\r' || c == '\n') {
+ Report.Error (1010, Location, "Newline in constant");
+ return Token.ERROR;
+ }
+
+ int d;
+ c = escape (c, out d);
+ if (c == -1)
+ return Token.ERROR;
+ if (d != 0)
+ throw new NotImplementedException ();
+
+ val = (char) c;
+ c = get_char ();
+
+ if (c != '\'') {
+ Report.Error (1012, Location, "Too many characters in character literal");
+
+ // Try to recover, read until newline or next "'"
+ while ((c = get_char ()) != -1) {
+ if (c == '\n' || c == '\'')
+ break;
+ }
+ return Token.ERROR;
+ }
+
+ return Token.LITERAL_CHARACTER;
+ }
+
+ int TokenizeLessThan ()
+ {
+ int d;
+ if (handle_typeof) {
+ PushPosition ();
+ if (parse_generic_dimension (out d)) {
+ val = d;
+ DiscardPosition ();
+ return Token.GENERIC_DIMENSION;
+ }
+ PopPosition ();
+ }
+
+ // Save current position and parse next token.
+ PushPosition ();
+ if (parse_less_than ()) {
+ if (parsing_generic_declaration && token () != Token.DOT) {
+ d = Token.OP_GENERICS_LT_DECL;
+ } else {
+ d = Token.OP_GENERICS_LT;
+ }
+ PopPosition ();
+ return d;
+ }
+
+ PopPosition ();
+ parsing_generic_less_than = 0;
+
+ d = peek_char ();
+ if (d == '<') {
+ get_char ();
+ d = peek_char ();
+
+ if (d == '=') {
+ get_char ();
+ return Token.OP_SHIFT_LEFT_ASSIGN;
+ }
+ return Token.OP_SHIFT_LEFT;
+ }
+
+ if (d == '=') {
+ get_char ();
+ return Token.OP_LE;
+ }
+ return Token.OP_LT;
+ }
+
//
// Handles one line xml comment
//
public void cleanup ()
{
if (ifstack != null && ifstack.Count >= 1) {
- current_location = new Location (ref_line, hidden ? -1 : Col);
int state = (int) ifstack.Pop ();
if ((state & REGION) != 0)
Report.Error (1038, Location, "#endregion directive expected");