@@ -36,6 +36,7 @@ static void read_ident(lexer_t *lexer);
 static void read_comment(lexer_t *lexer);
 static void read_long_comment(lexer_t *lexer);
 
+/* string => keyword_t "map" for detecting keywords */
 static keyword_t keywords[] = {
     {FUNCTION_KEYWORD , TOKEN_FUNCTION},
     {TRUE_KEYWORD , TOKEN_TRUE},
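For context on how a table like this is typically consumed, below is a standalone sketch of keyword detection by linear scan. The struct layout, field names, and the lookup_keyword() helper are illustrative assumptions, not this project's definitions:

/* Illustrative sketch only: field names and lookup_keyword() are
 * assumptions, not the project's actual keyword_t definition. */
#include <stdio.h>
#include <string.h>

typedef struct {
    const char *word;   /* keyword spelling, e.g. "function" */
    int         token;  /* token type to emit when matched   */
} kw_sketch_t;

static const kw_sketch_t kw_table[] = {
    {"function", 1 /* stands in for TOKEN_FUNCTION */},
    {"true",     2 /* stands in for TOKEN_TRUE     */},
};

/* Linear scan: return the keyword's token type, or -1 for a plain identifier. */
static int lookup_keyword(const char *ident)
{
    for (size_t i = 0; i < sizeof kw_table / sizeof kw_table[0]; ++i)
        if (strcmp(kw_table[i].word, ident) == 0)
            return kw_table[i].token;
    return -1;
}

int main(void)
{
    printf("%d\n", lookup_keyword("true")); /* prints 2  */
    printf("%d\n", lookup_keyword("x"));    /* prints -1 */
    return 0;
}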
@@ -77,6 +78,13 @@ lexer_t *tokenize(char *target_file, char *source)
         case '"': read_string(lexer, '"'); break;
         case '\'': read_string(lexer, '\''); break;
         case '\n': ++lexer->line_n; break;
+        case '_':
+            if (is_ident(peek(lexer))) {
+                read_ident(lexer);
+            } else {
+                emit_token(lexer, TOKEN_UNDERSCORE);
+            }
+            break;
         case '+':
             if (peek(lexer) == '=') {
                 eat(lexer);
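The new '_' case distinguishes a lone underscore token from an underscore that starts an identifier: `_foo` falls through to read_ident(), while a bare `_` emits TOKEN_UNDERSCORE. The diff does not show is_ident(); a plausible C-style definition (an assumption, not this project's code) would be:

#include <ctype.h>

/* Assumed behavior: true when c may appear in an identifier. */
static int is_ident(char c)
{
    return isalnum((unsigned char)c) || c == '_';
}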
@@ -89,6 +97,9 @@ lexer_t *tokenize(char *target_file, char *source)
             if (peek(lexer) == '=') {
                 eat(lexer);
                 emit_token(lexer, TOKEN_MIN_EQ);
+            } else if (peek(lexer) == '>') {
+                eat(lexer);
+                emit_token(lexer, TOKEN_R_ARROW);
             } else {
                 emit_token(lexer, TOKEN_MIN);
             }
@@ -105,26 +116,22 @@ lexer_t *tokenize(char *target_file, char *source)
             if (peek(lexer) == '&') {
                 eat(lexer);
                 emit_token(lexer, TOKEN_AND);
+            } else if (peek(lexer) == '=') {
+                eat(lexer);
+                emit_token(lexer, TOKEN_B_AND_EQ);
             } else {
-                if (peek(lexer) == '=') {
-                    eat(lexer);
-                    emit_token(lexer, TOKEN_B_AND_EQ);
-                } else {
-                    emit_token(lexer, TOKEN_B_AND);
-                }
+                emit_token(lexer, TOKEN_B_AND);
             }
             break;
         case '|':
             if (peek(lexer) == '|') {
                 eat(lexer);
                 emit_token(lexer, TOKEN_OR);
+            } else if (peek(lexer) == '=') {
+                eat(lexer);
+                emit_token(lexer, TOKEN_B_OR_EQ);
             } else {
-                if (peek(lexer) == '=') {
-                    eat(lexer);
-                    emit_token(lexer, TOKEN_B_OR_EQ);
-                } else {
-                    emit_token(lexer, TOKEN_B_OR);
-                }
+                emit_token(lexer, TOKEN_B_OR);
             }
             break;
         case '~':
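All of the operator hunks follow the same single-character lookahead shape, and the last hunk flattens the nested if/else in the '&' and '|' branches into an else-if chain with identical behavior. Below is a standalone sketch of that shape; it is not the project's code, and it assumes peek/eat semantics matching how the diff uses those helpers (peek inspects the next character without consuming it, eat consumes it):

/* Standalone sketch of the one-character lookahead used above. */
#include <stdio.h>

static const char *src;

static char peek(void) { return *src; }
static char eat(void)  { return *src++; }

/* Pick the token name for '-', '&' and '|' the way the new code does:
 * try the two-character forms first, then fall back to the single char. */
static const char *lex_op(char c)
{
    switch (c) {
    case '-':
        if (peek() == '=') { eat(); return "TOKEN_MIN_EQ"; }
        if (peek() == '>') { eat(); return "TOKEN_R_ARROW"; }
        return "TOKEN_MIN";
    case '&':
        if (peek() == '&') { eat(); return "TOKEN_AND"; }
        if (peek() == '=') { eat(); return "TOKEN_B_AND_EQ"; }
        return "TOKEN_B_AND";
    case '|':
        if (peek() == '|') { eat(); return "TOKEN_OR"; }
        if (peek() == '=') { eat(); return "TOKEN_B_OR_EQ"; }
        return "TOKEN_B_OR";
    }
    return "?";
}

int main(void)
{
    src = "-= -> - && &= & || |= |";
    while (*src) {
        char c = eat();
        if (c == ' ') continue;
        printf("%s\n", lex_op(c));
    }
    return 0;
}

Note how the early-return else-if chain mirrors the refactor in the final hunk, which removed the nested if/else inside the else branch without changing which tokens are emitted.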