}
// lex tokenizes an arithmetic-expression string into a flat token list.
// Recognized tokens: integer literals (NUMBER), the operators + - * /,
// and open/close parentheses. Any other rune is consumed and dropped.
// Returns a dynamic array owned by the caller.
lex :: proc(src: string) -> [dynamic]TOKEN {
	tokens := make([dynamic]TOKEN)
	sc := &scanner.Scanner{}
	sc = scanner.init(sc, src)
	for scanner.peek(sc) != scanner.EOF {
		switch scanner.peek_token(sc) {
		case scanner.Int:
			// Skip a single leading space before the digit run.
			if scanner.peek(sc) == ' ' {
				scanner.next(sc)
			}
			// Collect consecutive digit runes as temp-allocated strings
			// (tprintf), then parse the whole run as one integer literal.
			num_runes := make([dynamic]string)
			defer delete(num_runes)
			for scanner.peek_token(sc) == scanner.Int {
				num := fmt.tprintf("%v", scanner.next(sc))
				append(&num_runes, num)
			}
			// concatenate allocates with context.allocator; free it once
			// parsed (the previous version leaked this string per number).
			num_str := strings.concatenate(num_runes[:])
			defer delete(num_str)
			append(&tokens, TOKEN{TOKEN_TYPE.NUMBER, strconv.atoi(num_str)})
		case:
			token := scanner.next(sc)
			switch token {
			case '+':
				append(&tokens, TOKEN{TOKEN_TYPE.OPERATOR, OPERATOR_TYPE.PLUS})
			case '-':
				append(&tokens, TOKEN{TOKEN_TYPE.OPERATOR, OPERATOR_TYPE.MINUS})
			case '*':
				append(&tokens, TOKEN{TOKEN_TYPE.OPERATOR, OPERATOR_TYPE.MULTIPLY})
			case '/':
				append(&tokens, TOKEN{TOKEN_TYPE.OPERATOR, OPERATOR_TYPE.DIVIDE})
			case '(':
				// -1 is a placeholder value; parentheses carry no payload.
				append(&tokens, TOKEN{TOKEN_TYPE.OPEN_PARENTHESIS, -1})
			case ')':
				append(&tokens, TOKEN{TOKEN_TYPE.CLOSE_PARENTHESIS, -1})
			}
		}
	}
	return tokens
}
// shunting_yard converts an infix token stream (from lex) into postfix
// (RPN) order using Dijkstra's shunting-yard algorithm. The Queue is used
// as a stack via push_back/pop_back/peek_back. Returns a caller-owned
// dynamic array of tokens in evaluation order.
shunting_yard :: proc(tokens: [dynamic]TOKEN) -> [dynamic]TOKEN {
	ast := make([dynamic]TOKEN)
	operator_stack := &queue.Queue(TOKEN){}
	queue.init(operator_stack)
	for token in tokens {
		#partial switch token.type {
		case TOKEN_TYPE.NUMBER:
			// Operands go straight to the output.
			append(&ast, token)
		case TOKEN_TYPE.OPERATOR:
			current_importance := get_operator_importance(token)
			// Pop operators of greater-or-equal precedence (all our
			// operators are left-associative). Stop at an open
			// parenthesis WITHOUT asking for its precedence — the old
			// code fed '(' tokens to get_operator_importance, which
			// printed "UNREACHABLE" and relied on the 0 fallback.
			for queue.len(operator_stack^) != 0 {
				top := queue.peek_back(operator_stack)
				if top.type != TOKEN_TYPE.OPERATOR {
					break
				}
				if current_importance <= get_operator_importance(top^) {
					append(&ast, queue.pop_back(operator_stack))
				} else {
					break
				}
			}
			queue.push_back(operator_stack, token)
		case TOKEN_TYPE.OPEN_PARENTHESIS:
			queue.push_back(operator_stack, token)
		case TOKEN_TYPE.CLOSE_PARENTHESIS:
			// Pop until the matching '('; the emptiness guard avoids
			// peeking an empty stack on mismatched parentheses.
			for queue.len(operator_stack^) != 0 &&
			    queue.peek_back(operator_stack).type != TOKEN_TYPE.OPEN_PARENTHESIS {
				append(&ast, queue.pop_back(operator_stack))
			}
			if queue.len(operator_stack^) != 0 {
				queue.pop_back(operator_stack) // discard the '(' itself
			}
		}
	}
	// Flush any operators still pending on the stack.
	for queue.len(operator_stack^) != 0 {
		append(&ast, queue.pop_back(operator_stack))
	}
	return ast
}
get_operator_importance :: proc(operator: TOKEN) -> int {
switch operator.value {
case OPERATOR_TYPE.DIVIDE, OPERATOR_TYPE.MULTIPLY:
// Numbers taken from Wikipedia
return 3
case OPERATOR_TYPE.MINUS, OPERATOR_TYPE.PLUS:
return 2
}
fmt.println("UNREACHABLE")
return 0