adding strlits

lexer.h (46)
@@ -13,10 +13,11 @@ typedef enum {
   TOKEN_INTEGER,
   TOKEN_FLOAT,
   TOKEN_SPACE,
-  TOKEN_STRING,
+  TOKEN_STRING, // idx 5
+  TOKEN_IDENTIFIER,
   TOKEN_MUL,
   TOKEN_DIV,
-  TOKEN_UNKNOWN,
+  TOKEN_UNKNOWN, // idx 9
   TOKEN_EOF,
   TOKEN_NEWLINE,
   TOKEN_LPAREN,
@@ -30,6 +31,7 @@ typedef enum {
   BHV_NUMBER,
   BHV_STRING,
   BHV_FLOAT,
+  BHV_IDENT,
 } symbol_bhv;


@@ -152,28 +154,36 @@ size_t read_from_tok(Token *tok, const char *input, size_t cursor) {
     } else {
       token_push(tok, TOKEN_FLOAT, buf, BHV_FLOAT, cursor - start);
     }
-  } else if (isalpha(input[cursor])) {
+  } else if (isalpha(input[cursor]) && input[cursor] == '"'){
+    cursor++;
+    while(isalpha(input[cursor]) != '"' && input[cursor] != '\0'){
+      buf[i++] = input[cursor++];
+    }
+    buf[i] = '\0';
+    if (input[cursor] == '"') cursor ++;
+    token_push(tok, TOKEN_STRING, buf, BHV_STRING, cursor - start);
+  } else if (isalpha(input[cursor])) { // should be after checking for strlit
     while (isalpha(input[cursor])) {
       buf[i++] = input[cursor++];
     }
     buf[i] = '\0';
-    token_push(tok, TOKEN_STRING, buf, BHV_STRING, cursor - start);
+    token_push(tok, TOKEN_IDENTIFIER, buf, BHV_IDENT, cursor - start);
     //refactor into separate function to use in parsing functions and definitions
   } else {
     buf[0] = input[cursor];
     buf[1] = '\0';
     switch (input[cursor]) {
       case '+': token_push(tok, TOKEN_PLUS, "+", BHV_STACK, 1); break;
       case '-': token_push(tok, TOKEN_MINUS, "-", BHV_STACK, 1); break;
       case '*': token_push(tok, TOKEN_MUL, "*", BHV_STACK, 1); break;
       case '/': token_push(tok, TOKEN_DIV, "/", BHV_STACK, 1); break;
       case ' ': token_push(tok, TOKEN_SPACE, " ", BHV_UNDEFINED, 1); break;
       case '\n': token_push(tok, TOKEN_NEWLINE, "\\n", BHV_UNDEFINED, 1); break;
       case '(': token_push(tok, TOKEN_LPAREN, "(", BHV_STACK, 1); break;
       case ')': token_push(tok, TOKEN_RPAREN, ")", BHV_STACK, 1); break;
       case ',': token_push(tok, TOKEN_COMMA, ",", BHV_STACK, 1); break;
       default: token_push(tok, TOKEN_UNKNOWN, buf, BHV_UNDEFINED, 1); break;
     }
     cursor++;
   }

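Note on the new string-literal branch: as committed it can never be taken, because isalpha(input[cursor]) is false when the current character is '"', so the && makes the whole condition false and a quote falls through to the default: case as TOKEN_UNKNOWN. The loop condition isalpha(input[cursor]) != '"' also compares isalpha's int result against a character code rather than testing for the closing quote. Below is a minimal standalone sketch of the intended scan; scan_string and its buffer size are hypothetical stand-ins for the branch body inside read_from_tok, not part of this commit.

// Standalone sketch of the intended string-literal scan (hypothetical helper).
// It mirrors the buf/cursor conventions used by read_from_tok.
#include <stdio.h>

// Copies the characters between a pair of double quotes starting at
// input[cursor] into buf and returns the cursor just past the literal.
static size_t scan_string(const char *input, size_t cursor, char *buf) {
    size_t i = 0;
    if (input[cursor] == '"') cursor++;                    // skip the opening quote
    while (input[cursor] != '"' && input[cursor] != '\0')
        buf[i++] = input[cursor++];                        // copy the literal body
    buf[i] = '\0';
    if (input[cursor] == '"') cursor++;                    // consume the closing quote
    return cursor;
}

int main(void) {
    char buf[64];
    size_t end = scan_string("\"hello\" hi", 0, buf);
    printf("lexeme: %s, consumed: %zu chars\n", buf, end); // lexeme: hello, consumed: 7 chars
    return 0;
}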
parser3.c (new file, 11)

@@ -0,0 +1,11 @@
+#include "./lexer.h"
+
+
+int main(void){
+  const char ts[] = "\"hello\" hi"; // = 3
+  Token tk = tokenize_all(ts);
+  for (size_t i=0; i<tk.size; ++i){
+    printf("TokenNum: %zu Type: %d Value: %s\n", i, tk.type[i], tk.text[i]);
+  }
+  // printf("token count: %zu\n", tk.size);
+}
@@ -72,16 +72,79 @@ void construct_nodes(ASTTree* a, Token t){
   a->size = nc;
 }


+// void eval(ASTTree *tree){
+//   for (size_t i=0; i<tree->size; ++i){
+//     ASTNode curr = tree->nodes[i];
+//     size_t n = tree->size;
+//     float total = 0.0f;
+//     switch (curr.node){
+//       case TOKEN_PLUS:
+//         if (tree->size > 1){
+//           for (size_t i=0; i<tree->size; ++i){
+//             total += atof(tree->nodes[i].left) + atof(tree->nodes[i].right);
+//           }
+//           // for (size_t i=0; i<n; i++ && n--){
+//           //   total -=
+//           // }
+//         }
+//         for (size_t i=0; i<tree->size && (tree->size%i != 0); ++i){
+//           // total -= atof(tree->nodes[i].right);
+//           printf("%zu\n", i);
+//         }
+
+//         printf("%f\n", total);
+//         break;
+//       default:
+//         break;
+//     }
+//   }
+// }
+
+
+void eval(ASTTree* tree) {
+  if (!tree || !tree->nodes || tree->size == 0) {
+    fprintf(stderr, "Invalid or empty ASTTree\n");
+    return;
+  }
+
+  float total = 0.0f;
+  int initialized = 0;
+
+  for (size_t i = 0; i < tree->size; ++i) {
+    ASTNode* curr = &tree->nodes[i];
+
+    switch (curr->node) {
+      case TOKEN_PLUS:
+        if (!initialized && curr->left) {
+          total = atof(curr->left);
+          initialized = 1;
+        }
+        if (curr->right)
+          total += atof(curr->right);
+        break;
+
+
+      default:
+        fprintf(stderr, "Unknown token at node %zu\n", i);
+        break;
+    }
+  }
+
+  printf("Result: %.2f\n", total);
+}
+
+
 int main(int argc, char** argv){
-  Token tokens = tokenize_all("1+2 3-4 1/2 2*7"); //invalid syntax
+  // Token tokens = tokenize_all("1+2 3-4 1/2 2*7"); //invalid syntax
+  Token tokens = tokenize_all("1+2+3+4"); //invalid syntax
   ASTTree tree = {0};
   construct_nodes(&tree, tokens);
-  printf("node count: %zu\n", tree.size);
-  for (size_t i=0; i<tree.size; ++i){
-    printf("op: %s, left: %s right: %s\n",
-      token_type_to_string(tree.nodes[i].node),
-      tree.nodes[i].left,
-      tree.nodes[i].right);
-  }
+  eval(&tree);
+  // for (size_t i=0; i<tree.size; ++i){
+  //   ASTNode curr = tree.nodes[i];
+  //   printf("token: %s\n", token_type_to_string(curr.node));
+  //   printf("left: %s right: %s\n", curr.left, curr.right);
+  // }
+
 }
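The replacement eval() seeds total from the first TOKEN_PLUS node's left operand and then only adds each node's right operand, so the left operand of every later node is ignored. The standalone sketch below reproduces just that accumulation; the ASTNode/ASTTree layout and the nodes assumed for "1+2+3+4" are guesses, since construct_nodes is not part of this diff.

// Standalone sketch of the accumulation performed by the new eval().
// The ASTNode/ASTTree definitions and the node values for "1+2+3+4" are
// assumptions; construct_nodes is not shown in this commit.
#include <stdio.h>
#include <stdlib.h>

typedef enum { TOKEN_PLUS } NodeKind;                    // only '+' is handled

typedef struct { NodeKind node; const char *left, *right; } ASTNode;
typedef struct { ASTNode *nodes; size_t size; } ASTTree;

int main(void) {
    ASTNode nodes[] = {                                  // assumed: one node per '+'
        { TOKEN_PLUS, "1", "2" },
        { TOKEN_PLUS, "2", "3" },                        // left is ignored after node 0
        { TOKEN_PLUS, "3", "4" },
    };
    ASTTree tree = { nodes, 3 };

    float total = 0.0f;
    int initialized = 0;
    for (size_t i = 0; i < tree.size; ++i) {
        if (!initialized && tree.nodes[i].left) {
            total = atof(tree.nodes[i].left);            // seed from the first left operand
            initialized = 1;
        }
        if (tree.nodes[i].right)
            total += atof(tree.nodes[i].right);          // then add each right operand
    }
    printf("Result: %.2f\n", total);                     // prints Result: 10.00
    return 0;
}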