[vala] Updated genie scanner to handle all new/missing vala tokens and directives
- From: Jamie McCracken <jamiemcc src gnome org>
- To: svn-commits-list gnome org
- Subject: [vala] Updated genie scanner to handle all new/missing vala tokens and directives
- Date: Mon, 13 Apr 2009 20:29:22 -0400 (EDT)
commit cbc2127176905c19229c89ef9c28afc8ca81f41a
Author: Jamie McCracken <jamiemcc gnome org>
Date: Mon Apr 13 20:30:29 2009 -0400
Updated genie scanner to handle all new/missing vala tokens and directives
---
vala/valageniescanner.vala | 310 ++++++++++++++++++++++++++++++++++++++++++
vala/valagenietokentype.vala | 13 ++-
2 files changed, 322 insertions(+), 1 deletions(-)
diff --git a/vala/valageniescanner.vala b/vala/valageniescanner.vala
index 0974306..bf31565 100644
--- a/vala/valageniescanner.vala
+++ b/vala/valageniescanner.vala
@@ -48,6 +48,14 @@ public class Vala.Genie.Scanner {
string _comment;
+ Conditional[] conditional_stack;
+
+ struct Conditional {
+ public bool matched;
+ public bool else_found;
+ public bool skip_section;
+ }
+
public Scanner (SourceFile source_file) {
this.source_file = source_file;
@@ -246,6 +254,9 @@ public class Vala.Genie.Scanner {
break;
}
break;
+ case 'o':
+ if (matches (begin, "owned")) return TokenType.OWNED;
+ break;
case 'p':
if (matches (begin, "print")) return TokenType.PRINT;
break;
@@ -258,6 +269,9 @@ public class Vala.Genie.Scanner {
case 'w':
if (matches (begin, "while")) return TokenType.WHILE;
break;
+ case 'y':
+ if (matches (begin, "yield")) return TokenType.YIELD;
+ break;
}
break;
case 6:
@@ -325,6 +339,9 @@ public class Vala.Genie.Scanner {
case 't':
if (matches (begin, "typeof")) return TokenType.TYPEOF;
break;
+ case 'y':
+ if (matches (begin, "yields")) return TokenType.YIELDS;
+ break;
}
break;
case 7:
@@ -355,6 +372,9 @@ public class Vala.Genie.Scanner {
case 'p':
if (matches (begin, "private")) return TokenType.PRIVATE;
break;
+ case 'u':
+ if (matches (begin, "unowned")) return TokenType.UNOWNED;
+ break;
case 'v':
if (matches (begin, "virtual")) return TokenType.VIRTUAL;
break;
@@ -371,6 +391,9 @@ public class Vala.Genie.Scanner {
case 'd':
if (matches (begin, "delegate")) return TokenType.DELEGATE;
break;
+ case 'i':
+ if (matches (begin, "internal")) return TokenType.INTERNAL;
+ break;
case 'o':
if (matches (begin, "override")) return TokenType.OVERRIDE;
break;
@@ -817,6 +840,34 @@ public class Vala.Genie.Scanner {
case '"':
if (begin[0] == '\'') {
type = TokenType.CHARACTER_LITERAL;
+ } else if (current < end - 6 && begin[1] == '"' && begin[2] == '"') {
+ type = TokenType.VERBATIM_STRING_LITERAL;
+ token_length_in_chars = 6;
+ current += 3;
+ while (current < end - 4) {
+ if (current[0] == '"' && current[1] == '"' && current[2] == '"') {
+ break;
+ } else if (current[0] == '\n') {
+ current++;
+ line++;
+ column = 1;
+ token_length_in_chars = 3;
+ } else {
+ unichar u = ((string) current).get_char_validated ((long) (end - current));
+ if (u != (unichar) (-1)) {
+ current += u.to_utf8 (null);
+ token_length_in_chars++;
+ } else {
+ Report.error (new SourceReference (source_file, line, column + token_length_in_chars, line, column + token_length_in_chars), "invalid UTF-8 character");
+ }
+ }
+ }
+ if (current[0] == '"' && current[1] == '"' && current[2] == '"') {
+ current += 3;
+ } else {
+ Report.error (new SourceReference (source_file, line, column + token_length_in_chars, line, column + token_length_in_chars), "syntax error, expected \"\"\"");
+ }
+ break;
} else {
type = TokenType.STRING_LITERAL;
}
@@ -938,6 +989,12 @@ public class Vala.Genie.Scanner {
current++;
column++;
}
+
+ if ((column == 1) && (current[0] == '#')) {
+ pp_directive ();
+ return true;
+ }
+
return found;
}
@@ -1071,5 +1128,258 @@ public class Vala.Genie.Scanner {
return result_builder.str;
}
+
+ bool pp_whitespace () {
+ bool found = false;
+ while (current < end && current[0].isspace () && current[0] != '\n') {
+ found = true;
+ current++;
+ column++;
+ }
+ return found;
+ }
+
+ void pp_directive () {
+ // hash sign
+ current++;
+ column++;
+
+ pp_whitespace ();
+
+ char* begin = current;
+ int len = 0;
+ while (current < end && current[0].isalnum ()) {
+ current++;
+ column++;
+ len++;
+ }
+
+ if (len == 2 && matches (begin, "if")) {
+ parse_pp_if ();
+ } else if (len == 4 && matches (begin, "elif")) {
+ parse_pp_elif ();
+ } else if (len == 4 && matches (begin, "else")) {
+ parse_pp_else ();
+ } else if (len == 5 && matches (begin, "endif")) {
+ parse_pp_endif ();
+ } else {
+ Report.error (new SourceReference (source_file, line, column - len, line, column), "syntax error, invalid preprocessing directive");
+ }
+
+ if (conditional_stack.length > 0
+ && conditional_stack[conditional_stack.length - 1].skip_section) {
+ // skip lines until next preprocessing directive
+ bool bol = false;
+ while (current < end) {
+ if (bol && current[0] == '#') {
+ // go back to begin of line
+ current -= (column - 1);
+ column = 1;
+ return;
+ }
+ if (current[0] == '\n') {
+ line++;
+ column = 0;
+ bol = true;
+ } else if (!current[0].isspace ()) {
+ bol = false;
+ }
+ current++;
+ column++;
+ }
+ }
+ }
+
+ void pp_eol () {
+ pp_whitespace ();
+ if (current >= end || current[0] != '\n') {
+ Report.error (new SourceReference (source_file, line, column, line, column), "syntax error, expected newline");
+ }
+ }
+
+ void parse_pp_if () {
+ pp_whitespace ();
+
+ bool condition = parse_pp_expression ();
+
+ pp_eol ();
+
+ conditional_stack += Conditional ();
+
+ if (condition && (conditional_stack.length == 1 || !conditional_stack[conditional_stack.length - 2].skip_section)) {
+ // condition true => process code within if
+ conditional_stack[conditional_stack.length - 1].matched = true;
+ } else {
+ // skip lines until next preprocessing directive
+ conditional_stack[conditional_stack.length - 1].skip_section = true;
+ }
+ }
+
+ void parse_pp_elif () {
+ pp_whitespace ();
+
+ bool condition = parse_pp_expression ();
+
+ pp_eol ();
+
+ if (conditional_stack.length == 0 || conditional_stack[conditional_stack.length - 1].else_found) {
+ Report.error (new SourceReference (source_file, line, column, line, column), "syntax error, unexpected #elif");
+ return;
+ }
+
+ if (condition && !conditional_stack[conditional_stack.length - 1].matched
+ && (conditional_stack.length == 1 || !conditional_stack[conditional_stack.length - 2].skip_section)) {
+ // condition true => process code within if
+ conditional_stack[conditional_stack.length - 1].matched = true;
+ conditional_stack[conditional_stack.length - 1].skip_section = false;
+ } else {
+ // skip lines until next preprocessing directive
+ conditional_stack[conditional_stack.length - 1].skip_section = true;
+ }
+ }
+
+ void parse_pp_else () {
+ pp_eol ();
+
+ if (conditional_stack.length == 0 || conditional_stack[conditional_stack.length - 1].else_found) {
+ Report.error (new SourceReference (source_file, line, column, line, column), "syntax error, unexpected #else");
+ return;
+ }
+
+ if (!conditional_stack[conditional_stack.length - 1].matched
+ && (conditional_stack.length == 1 || !conditional_stack[conditional_stack.length - 2].skip_section)) {
+ // condition true => process code within if
+ conditional_stack[conditional_stack.length - 1].matched = true;
+ conditional_stack[conditional_stack.length - 1].skip_section = false;
+ } else {
+ // skip lines until next preprocessing directive
+ conditional_stack[conditional_stack.length - 1].skip_section = true;
+ }
+ }
+
+ void parse_pp_endif () {
+ pp_eol ();
+
+ if (conditional_stack.length == 0) {
+ Report.error (new SourceReference (source_file, line, column, line, column), "syntax error, unexpected #endif");
+ return;
+ }
+
+ conditional_stack.length--;
+ }
+
+ bool parse_pp_symbol () {
+ int len = 0;
+ while (current < end && is_ident_char (current[0])) {
+ current++;
+ column++;
+ len++;
+ }
+
+ if (len == 0) {
+ Report.error (new SourceReference (source_file, line, column, line, column), "syntax error, expected identifier");
+ return false;
+ }
+
+ string identifier = ((string) (current - len)).ndup (len);
+ bool defined;
+ if (identifier == "true") {
+ defined = true;
+ } else if (identifier == "false") {
+ defined = false;
+ } else {
+ defined = source_file.context.is_defined (identifier);
+ }
+
+ return defined;
+ }
+
+ bool parse_pp_primary_expression () {
+ if (current >= end) {
+ Report.error (new SourceReference (source_file, line, column, line, column), "syntax error, expected identifier");
+ } else if (is_ident_char (current[0])) {
+ return parse_pp_symbol ();
+ } else if (current[0] == '(') {
+ current++;
+ column++;
+ pp_whitespace ();
+ bool result = parse_pp_expression ();
+ pp_whitespace ();
+ if (current < end && current[0] == ')') {
+ current++;
+ column++;
+ } else {
+ Report.error (new SourceReference (source_file, line, column, line, column), "syntax error, expected `)'");
+ }
+ return result;
+ } else {
+ Report.error (new SourceReference (source_file, line, column, line, column), "syntax error, expected identifier");
+ }
+ return false;
+ }
+
+ bool parse_pp_unary_expression () {
+ if (current < end && current[0] == '!') {
+ current++;
+ column++;
+ pp_whitespace ();
+ return !parse_pp_unary_expression ();
+ }
+
+ return parse_pp_primary_expression ();
+ }
+
+ bool parse_pp_equality_expression () {
+ bool left = parse_pp_unary_expression ();
+ pp_whitespace ();
+ while (true) {
+ if (current < end - 1 && current[0] == '=' && current[1] == '=') {
+ current += 2;
+ column += 2;
+ pp_whitespace ();
+ bool right = parse_pp_unary_expression ();
+ left = (left == right);
+ } else if (current < end - 1 && current[0] == '!' && current[1] == '=') {
+ current += 2;
+ column += 2;
+ pp_whitespace ();
+ bool right = parse_pp_unary_expression ();
+ left = (left != right);
+ } else {
+ break;
+ }
+ }
+ return left;
+ }
+
+ bool parse_pp_and_expression () {
+ bool left = parse_pp_equality_expression ();
+ pp_whitespace ();
+ while (current < end - 1 && current[0] == '&' && current[1] == '&') {
+ current += 2;
+ column += 2;
+ pp_whitespace ();
+ bool right = parse_pp_equality_expression ();
+ left = left && right;
+ }
+ return left;
+ }
+
+ bool parse_pp_or_expression () {
+ bool left = parse_pp_and_expression ();
+ pp_whitespace ();
+ while (current < end - 1 && current[0] == '|' && current[1] == '|') {
+ current += 2;
+ column += 2;
+ pp_whitespace ();
+ bool right = parse_pp_and_expression ();
+ left = left || right;
+ }
+ return left;
+ }
+
+ bool parse_pp_expression () {
+ return parse_pp_or_expression ();
+ }
}
diff --git a/vala/valagenietokentype.vala b/vala/valagenietokentype.vala
index a2fd0ca..5193c61 100644
--- a/vala/valagenietokentype.vala
+++ b/vala/valagenietokentype.vala
@@ -91,6 +91,7 @@ public enum Vala.Genie.TokenType {
INLINE,
INTEGER_LITERAL,
INTERFACE,
+ INTERNAL,
INTERR,
IS,
ISA,
@@ -120,6 +121,7 @@ public enum Vala.Genie.TokenType {
OPEN_BRACKET,
OPEN_PARENS,
OVERRIDE,
+ OWNED,
PASS,
PERCENT,
PLUS,
@@ -149,15 +151,19 @@ public enum Vala.Genie.TokenType {
TRUE,
TRY,
TYPEOF,
+ UNOWNED,
USES,
VAR,
+ VERBATIM_STRING_LITERAL,
VIRTUAL,
VOID,
VOLATILE,
WEAK,
WHEN,
WHILE,
- WRITEONLY;
+ WRITEONLY,
+ YIELD,
+ YIELDS;
public weak string to_string () {
switch (this) {
@@ -227,6 +233,7 @@ public enum Vala.Genie.TokenType {
case INLINE: return "`inline'";
case INTEGER_LITERAL: return "integer literal";
case INTERFACE: return "`interface'";
+ case INTERNAL: return "`internal'";
case INTERR: return "`?'";
case IS: return "`is'";
case ISA: return "`isa'";
@@ -256,6 +263,7 @@ public enum Vala.Genie.TokenType {
case OPEN_BRACKET: return "`['";
case OPEN_PARENS: return "`('";
case OVERRIDE: return "`override'";
+ case OWNED: return "`owned'";
case PASS: return "`pass'";
case PERCENT: return "`%'";
case PLUS: return "`+'";
@@ -285,6 +293,7 @@ public enum Vala.Genie.TokenType {
case TRUE: return "`true'";
case TRY: return "`try'";
case TYPEOF: return "`typeof'";
+ case UNOWNED: return "`unowned'";
case USES: return "`uses'";
case VAR: return "`var'";
case VIRTUAL: return "`virtual'";
@@ -294,6 +303,8 @@ public enum Vala.Genie.TokenType {
case WHEN: return "`when'";
case WHILE: return "`while'";
case WRITEONLY: return "`writeonly'";
+ case YIELD: return "`yield'";
+ case YIELDS: return "`yields'";
default: return "unknown token";
}
}
[Date Prev] [Date Next]
[Thread Prev] [Thread Next]
[Thread Index]
[Date Index]
[Author Index]