OIHG72BUOXN34ADCCDYTCNB3QDCLDWB7EA2XXLIXY254VVITRWZAC
package vm
import "core:container/queue"
import "core:fmt"
import syn "../syntax-tree"
// Stack-based virtual machine that evaluates a token stream in
// reverse Polish notation (see execute).
VM :: struct {
// Postfix (RPN) token stream to execute; set once in new_vm.
code: []syn.TOKEN,
// Instruction pointer. NOTE(review): written to 0 in new_vm but never
// read by execute, and u8 caps it at 255 tokens — confirm it is needed.
ip: u8,
// Operand stack; a Queue used in LIFO fashion via push_back/pop_back.
stack: ^queue.Queue(syn.TOKEN),
}
// Allocate a VM over the given RPN token stream.
// The VM borrows `code` and owns a freshly allocated operand stack;
// release both with delete_vm.
new_vm :: proc(code: []syn.TOKEN) -> ^VM {
    vm := new(VM)
    vm.code = code
    vm.ip = 0
    vm.stack = new(queue.Queue(syn.TOKEN))
    queue.init(vm.stack)
    return vm
}
// Tear down a VM created by new_vm: releases the token slice, the operand
// stack, and the VM struct itself.
delete_vm :: proc(vm: ^VM) {
    // NOTE(review): this deletes the caller-supplied code slice, i.e. the
    // VM takes ownership of it — confirm callers expect that.
    delete(vm.code)
    // Release the queue's backing buffer...
    queue.destroy(vm.stack)
    // BUG FIX: ...and the Queue struct itself, which new_vm allocated with
    // `new` but was never freed (memory leak).
    free(vm.stack)
    free(vm)
}
// Evaluate the VM's RPN token stream and return the resulting integer.
// NUMBER tokens are pushed; OPERATOR tokens pop two operands and push the
// result. Assumes a well-formed expression (panics via pop_back on an
// empty stack otherwise).
execute :: proc(vm: ^VM) -> int {
    using queue
    for token in vm.code {
        #partial switch token.type {
        case syn.TOKEN_TYPE.NUMBER:
            push_back(vm.stack, token)
        case syn.TOKEN_TYPE.OPERATOR:
            // Operands were pushed left-to-right, so the first pop is the
            // right-hand side.
            y := pop_back(vm.stack)
            x := pop_back(vm.stack)
            switch token.value {
            case syn.OPERATOR_TYPE.PLUS:
                push_back(vm.stack, vm_add(&x, &y))
            case syn.OPERATOR_TYPE.MINUS:
                push_back(vm.stack, vm_sub(&x, &y))
            case syn.OPERATOR_TYPE.MULTIPLY:
                push_back(vm.stack, vm_mul(&x, &y))
            case syn.OPERATOR_TYPE.DIVIDE:
                // BUG FIX: previously this case was empty, so both operands
                // were consumed but no result pushed, corrupting the stack
                // for every later operator. Use integer division (the value
                // union only holds int) until floats are implemented.
                push_back(vm.stack, vm_divide(&x, &y))
            }
        }
    }
    return pop_back(vm.stack).value.(int)
}
@(private)
// Sum of two NUMBER tokens, wrapped back into a NUMBER token.
// Panics via the type assertion if either token does not hold an int.
vm_add :: #force_inline proc(x: ^syn.TOKEN, y: ^syn.TOKEN) -> syn.TOKEN {
    return syn.TOKEN{syn.TOKEN_TYPE.NUMBER, x.value.(int) + y.value.(int)}
}
@(private)
// Difference x - y of two NUMBER tokens, wrapped back into a NUMBER token.
// Panics via the type assertion if either token does not hold an int.
vm_sub :: #force_inline proc(x: ^syn.TOKEN, y: ^syn.TOKEN) -> syn.TOKEN {
    return syn.TOKEN{syn.TOKEN_TYPE.NUMBER, x.value.(int) - y.value.(int)}
}
@(private)
// Product of two NUMBER tokens, wrapped back into a NUMBER token.
// Panics via the type assertion if either token does not hold an int.
vm_mul :: #force_inline proc(x: ^syn.TOKEN, y: ^syn.TOKEN) -> syn.TOKEN {
    return syn.TOKEN{syn.TOKEN_TYPE.NUMBER, x.value.(int) * y.value.(int)}
}
@(private)
// Integer quotient x / y of two NUMBER tokens, wrapped back into a NUMBER
// token. Panics via the type assertion if either token does not hold an
// int. NOTE(review): y == 0 triggers a division-by-zero panic — decide on
// an error-reporting strategy before wiring this into execute for floats.
vm_divide :: #force_inline proc(x: ^syn.TOKEN, y: ^syn.TOKEN) -> syn.TOKEN {
op1: int = x.value.(int)
op2: int = y.value.(int)
return syn.TOKEN{syn.TOKEN_TYPE.NUMBER, op1 / op2}
}
package tokens
// Lexical category of a TOKEN produced by the lexer.
TOKEN_TYPE :: enum {
// Integer literal; TOKEN.value holds an int.
NUMBER,
// Arithmetic operator; TOKEN.value holds an OPERATOR_TYPE.
OPERATOR,
// Grouping tokens; consumed by the shunting-yard pass, never reach the VM.
OPEN_PARENTHESIS,
CLOSE_PARENTHESIS,
}
// The four supported binary arithmetic operators.
OPERATOR_TYPE :: enum {
PLUS,
MINUS,
MULTIPLY,
DIVIDE,
}
// A single lexed token: its category plus a payload whose active union
// variant depends on `type` (int for NUMBER, OPERATOR_TYPE for OPERATOR;
// parenthesis tokens carry a placeholder int).
TOKEN :: struct {
type: TOKEN_TYPE,
value: union {
int,
OPERATOR_TYPE,
},
}
package shuntingyard
import "core:container/queue"
import "../lex"
import syn "../syntax-tree/"
// Convert an infix token stream into reverse Polish notation using
// Dijkstra's shunting-yard algorithm. The caller owns the returned dynamic
// array. Assumes parentheses are balanced; a stray ')' would pop an empty
// operator stack.
shunting_yard :: proc(tokens: [dynamic]syn.TOKEN) -> [dynamic]syn.TOKEN {
    ast := make([dynamic]syn.TOKEN)
    operator_stack := &queue.Queue(syn.TOKEN){}
    queue.init(operator_stack)
    // BUG FIX: queue.init allocates a backing buffer that was never
    // released — destroy the stack when conversion finishes.
    defer queue.destroy(operator_stack)
    for token in tokens {
        #partial switch token.type {
        case syn.TOKEN_TYPE.NUMBER:
            // Operands go straight to the output.
            append(&ast, token)
        case syn.TOKEN_TYPE.OPERATOR:
            if queue.len(operator_stack^) == 0 {
                queue.push_back(operator_stack, token)
                continue
            }
            current_token_importance := get_operator_importance(token)
            for queue.len(operator_stack^) != 0 {
                last_token := queue.peek_back(operator_stack)
                last_token_importance := get_operator_importance(last_token^)
                // Pop operators of higher-or-equal precedence to the output
                // (left-associative operators; no '^' support yet), then
                // park the current operator on the stack.
                if current_token_importance <= last_token_importance {
                    append(&ast, queue.pop_back(operator_stack))
                } else {
                    queue.push_back(operator_stack, token)
                    break
                }
            }
            // Popping may have emptied the stack before the token was
            // parked; push it now.
            if queue.len(operator_stack^) == 0 {
                queue.push_back(operator_stack, token)
            }
        case syn.TOKEN_TYPE.OPEN_PARENTHESIS:
            queue.push_back(operator_stack, token)
        case syn.TOKEN_TYPE.CLOSE_PARENTHESIS:
            // Flush operators back to the matching '(' and discard the
            // parenthesis pair.
            for queue.peek_back(operator_stack).type != syn.TOKEN_TYPE.OPEN_PARENTHESIS {
                append(&ast, queue.pop_back(operator_stack))
            }
            queue.pop_back(operator_stack)
        }
    }
    // Drain any remaining operators onto the output.
    for queue.len(operator_stack^) != 0 {
        append(&ast, queue.pop_back(operator_stack))
    }
    return ast
}
// Binding strength of an operator token following standard arithmetic
// precedence (values per Wikipedia's operator-precedence table).
// Non-operator payloads (e.g. the int placeholder carried by parenthesis
// tokens) rank 0, so real operators never pop a '(' off the stack.
get_operator_importance :: proc(operator: syn.TOKEN) -> int {
    op, is_operator := operator.value.(syn.OPERATOR_TYPE)
    if !is_operator {
        return 0
    }
    switch op {
    case .MULTIPLY, .DIVIDE:
        return 3
    case .PLUS, .MINUS:
        return 2
    }
    return 0
}
package lex
import "core:fmt"
import "core:strconv"
import "core:strings"
import "core:text/scanner"
import syn "../syntax-tree"
// Debug helper: print each token of the stream on its own line.
print_ast :: proc(ast: [dynamic]syn.TOKEN) {
    for i in 0 ..< len(ast) {
        fmt.println(ast[i])
    }
}
// Tokenize an arithmetic expression string into a flat token stream.
// Recognizes integer literals, + - * / and parentheses; unrecognized runes
// are silently skipped. The caller owns the returned dynamic array.
lex :: proc(src: string) -> [dynamic]syn.TOKEN {
ast := make([dynamic]syn.TOKEN)
sc := &scanner.Scanner{}
sc = scanner.init(sc, src)
for scanner.peek(sc) != scanner.EOF {
switch scanner.peek_token(sc) {
case scanner.Int:
// NOTE(review): presumably skips a leading space before the digits —
// confirm peek/peek_token interplay against core:text/scanner docs.
if scanner.peek(sc) == ' ' {
scanner.next(sc)
}
// Collect consecutive digit runes as strings, then join and parse.
num_runes := make([dynamic]string)
defer delete(num_runes)
for scanner.peek_token(sc) == scanner.Int {
num := fmt.tprintf("%v", scanner.next(sc))
append(&num_runes, num)
}
// NOTE(review): strings.concatenate allocates a string that is never
// deleted — confirm whether a temp allocator is intended here.
append(
&ast,
syn.TOKEN{syn.TOKEN_TYPE.NUMBER, strconv.atoi(strings.concatenate(num_runes[:]))},
)
case:
// Single-rune tokens: operators and parentheses. Parenthesis tokens
// carry -1 as a placeholder payload (never read downstream).
token := scanner.next(sc)
switch token {
case '+':
append(&ast, syn.TOKEN{syn.TOKEN_TYPE.OPERATOR, syn.OPERATOR_TYPE.PLUS})
case '-':
append(&ast, syn.TOKEN{syn.TOKEN_TYPE.OPERATOR, syn.OPERATOR_TYPE.MINUS})
case '*':
append(&ast, syn.TOKEN{syn.TOKEN_TYPE.OPERATOR, syn.OPERATOR_TYPE.MULTIPLY})
case '/':
append(&ast, syn.TOKEN{syn.TOKEN_TYPE.OPERATOR, syn.OPERATOR_TYPE.DIVIDE})
case '(':
append(&ast, syn.TOKEN{syn.TOKEN_TYPE.OPEN_PARENTHESIS, -1})
case ')':
append(&ast, syn.TOKEN{syn.TOKEN_TYPE.CLOSE_PARENTHESIS, -1})
}
}
}
return ast
}