public class Tokenizer : yyParser.yyInput
{
+ class KeywordEntry
+ {
+ // Token id (Token.*) this keyword maps to.
+ public readonly int Token;
+ // Next entry in the collision chain for keywords sharing the
+ // same length and first character (singly-linked list).
+ public KeywordEntry Next;
+ // Keyword spelled as a char array for cheap per-char compares.
+ public readonly char[] Value;
+
+ public KeywordEntry (string value, int token)
+ {
+ this.Value = value.ToCharArray ();
+ this.Token = token;
+ }
+ }
+
SeekableStreamReader reader;
SourceFile ref_name;
CompilationUnit file_name;
// scope only
//
public int parsing_block;
- internal int query_parsing;
+ internal bool query_parsing;
//
// When parsing type only, useful for ambiguous nullable types
//
bool tokens_seen = false;
+ //
+ // Set to true once the GENERATE_COMPLETION token has been
+ // returned. This helps produce one GENERATE_COMPLETION,
+ // as many COMPLETE_COMPLETION as necessary to complete the
+ // AST tree and one final EOF.
+ //
+ bool generated;
+
//
// Whether a token has been seen on the file
// This is needed because `define' is not allowed to be used
}
}
+ //
+ // This is used to trigger completion generation on the parser
+ //
+ public bool CompleteOnEOF;
+
void AddEscapedIdentifier (LocatedToken lt)
{
if (escaped_identifiers == null)
//
// Class variables
//
- static CharArrayHashtable[] keywords;
+ static KeywordEntry[][] keywords;
static Hashtable keyword_strings;
static NumberStyles styles;
static NumberFormatInfo csharp_format_info;
static void AddKeyword (string kw, int token)
{
keyword_strings.Add (kw, kw);
- if (keywords [kw.Length] == null) {
- keywords [kw.Length] = new CharArrayHashtable (kw.Length);
+
+ // Keywords are bucketed first by length, then by first
+ // character rebased to '_' (valid range '_'..'z', i.e.
+ // 'z' - '_' + 1 slots); collisions chain through Next.
+ int length = kw.Length;
+ if (keywords [length] == null) {
+ keywords [length] = new KeywordEntry ['z' - '_' + 1];
+ }
+
+ int char_index = kw [0] - '_';
+ KeywordEntry kwe = keywords [length] [char_index];
+ if (kwe == null) {
+ keywords [length] [char_index] = new KeywordEntry (kw, token);
+ return;
+ }
+
+ // Slot taken: append to the end of the collision chain.
+ while (kwe.Next != null) {
+ kwe = kwe.Next;
}
- keywords [kw.Length] [kw.ToCharArray ()] = token;
+
+ kwe.Next = new KeywordEntry (kw, token);
}
static void InitTokens ()
{
keyword_strings = new Hashtable ();
- keywords = new CharArrayHashtable [64];
+
+ // Buckets are indexed by keyword length; the longest keyword is
+ // currently 10 characters long, hence 11 slots
+ keywords = new KeywordEntry [11] [];
AddKeyword ("__arglist", Token.ARGLIST);
AddKeyword ("abstract", Token.ABSTRACT);
//
static Tokenizer ()
{
- Reset ();
- }
-
- public static void Reset ()
- {
- InitTokens ();
+ InitTokens ();
csharp_format_info = NumberFormatInfo.InvariantInfo;
styles = NumberStyles.Float;
int GetKeyword (char[] id, int id_len)
{
- /*
- * Keywords are stored in an array of hashtables grouped by their
- * length.
- */
+ //
+ // Keywords are stored in an array of arrays grouped by their
+ // length and then by the first character
+ //
+ if (id_len >= keywords.Length || keywords [id_len] == null)
+ return -1;
- if ((id_len >= keywords.Length) || (keywords [id_len] == null))
+ // The bucket array only has 'z' - '_' + 1 slots, so the rebased
+ // index must be checked against 'z' - '_', not raw 'z' (122):
+ // with the looser bound an identifier starting with a character
+ // above 'z' (e.g. a non-ASCII letter, which is >= '_' and thus
+ // reaches this lookup) would index past the end of the array.
+ // (No lower-bound check: callers presumably only get here with
+ // id [0] >= '_' — TODO confirm at the call site.)
+ int first_index = id [0] - '_';
+ if (first_index > 'z' - '_')
	return -1;
- object o = keywords [id_len] [id];
- if (o == null)
+ KeywordEntry kwe = keywords [id_len] [first_index];
+ if (kwe == null)
+ return -1;
+
+ int res;
+ do {
+ res = kwe.Token;
+ for (int i = 1; i < id_len; ++i) {
+ if (id [i] != kwe.Value [i]) {
+ res = 0;
+ break;
+ }
+ }
+ kwe = kwe.Next;
+ } while (kwe != null && res == 0);
+
+ if (res == 0)
return -1;
int next_token;
- int res = (int) o;
switch (res) {
case Token.GET:
case Token.SET:
}
break;
case Token.WHERE:
- if (!handle_where && query_parsing == 0)
+ if (!handle_where && !query_parsing)
res = -1;
break;
case Token.FROM:
// A query expression is any expression that starts with `from identifier'
// followed by any token except ; , =
//
- if (query_parsing == 0) {
+ if (!query_parsing) {
if (lambda_arguments_parsing) {
res = -1;
break;
if (next_token == Token.SEMICOLON || next_token == Token.COMMA || next_token == Token.EQUALS)
goto default;
- ++query_parsing;
+ res = Token.FROM_FIRST;
+ query_parsing = true;
if (RootContext.Version <= LanguageVersion.ISO_2)
Report.FeatureIsNotAvailable (Location, "query expressions");
break;
case Token.ASCENDING:
case Token.DESCENDING:
case Token.INTO:
- if (query_parsing == 0)
+ if (!query_parsing)
res = -1;
break;
static bool is_identifier_part_character (char c)
{
- return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || c == '_' || (c >= '0' && c <= '9') ||
- Char.IsLetter (c) || Char.GetUnicodeCategory (c) == UnicodeCategory.ConnectorPunctuation;
+ // ASCII fast path: letters, digits and '_' cover the vast
+ // majority of identifier characters.
+ if (c >= 'a' && c <= 'z')
+ return true;
+
+ if (c >= 'A' && c <= 'Z')
+ return true;
+
+ if (c == '_' || (c >= '0' && c <= '9'))
+ return true;
+
+ // Any other ASCII character can be rejected without the slow
+ // Unicode lookup below: '_' is the only ASCII connector
+ // punctuation and was already handled above.
+ if (c < 0x80)
+ return false;
+
+ return Char.IsLetter (c) || Char.GetUnicodeCategory (c) == UnicodeCategory.ConnectorPunctuation;
}
public static bool IsKeyword (string s)
the_token = token ();
} while (the_token != Token.CLOSE_BRACKET);
the_token = token ();
+ } else if (the_token == Token.IN || the_token == Token.OUT) {
+ the_token = token ();
}
switch (the_token) {
case Token.IDENTIFIER:
case Token.CHAR:
case Token.VOID:
break;
-
case Token.OP_GENERICS_GT:
return true;
public bool advance ()
{
- return peek_char () != -1;
+ // Keep the parser pulling tokens at end of input while
+ // completion tokens still need to be produced (CompleteOnEOF).
+ return peek_char () != -1 || CompleteOnEOF;
}
public Object Value {
return Token.OPEN_PARENS;
}
+ // Optimize using a single peek: for these following characters
+ // the full TokenizeOpenParens lookahead below is unnecessary
+ int xx = peek_char ();
+ switch (xx) {
+ case '(':
+ case '\'':
+ case '"':
+ case '0':
+ case '1':
+ return Token.OPEN_PARENS;
+ }
+
lambda_arguments_parsing = true;
PushPosition ();
d = TokenizeOpenParens ();
}
return Token.OP_GT;
-
+
case '+':
d = peek_char ();
if (d == '+') {
error_details = ((char)c).ToString ();
return Token.ERROR;
}
+
+ // In completion mode, return one GENERATE_COMPLETION at the
+ // first end-of-input (guarded by `generated'), then
+ // COMPLETE_COMPLETION on every subsequent call so the parser
+ // can close any open constructs before the real EOF.
+ if (CompleteOnEOF){
+ if (generated)
+ return Token.COMPLETE_COMPLETION;
+
+ generated = true;
+ return Token.GENERATE_COMPLETION;
+ }
+
return Token.EOF;
}