package lex

import "core:fmt"
import "core:os"
import "core:strconv"
import "core:text/scanner"

import syn "../syntax-tree"
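
// This file implements the lexer: it turns an arithmetic expression string
// into a flat list of syn.TOKEN values (numbers, operators, and parentheses)
// for the parser to consume.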

// print_ast prints each token in the stream on its own line; despite the
// name, the input is a flat token list rather than a tree.
print_ast :: proc(tokens: []syn.TOKEN) {
	for t in tokens {
		fmt.println(t)
	}
}

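// lex converts src into a token stream. The returned slice is backed by a
// dynamic array allocated with the context allocator, so the caller owns
// the memory and is responsible for freeing it.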
lex :: proc(src: string) -> []syn.TOKEN {
	tokens := make([dynamic]syn.TOKEN)

	sc := &scanner.Scanner{}
	scanner.init(sc, src)

	// scan consumes and returns the next token: a class constant such as
	// scanner.Int for multi-character tokens, or the rune itself for
	// single-character tokens like '+'. Whitespace is skipped by the
	// scanner's defaults.
	for tok := scanner.scan(sc); tok != scanner.EOF; tok = scanner.scan(sc) {
		switch tok {
		case scanner.Int, scanner.Float:
			// Numbers are stored as f64, so integer and float literals are
			// handled the same way. token_text is the literal text of the
			// token that scan just consumed.
			num, ok := strconv.parse_f64(scanner.token_text(sc))
			if !ok {
				fmt.eprintln("Could not parse number:", scanner.token_text(sc))
				os.exit(1)
			}
			append(&tokens, syn.TOKEN{syn.TOKEN_TYPE.NUMBER, num})
		case '+':
			append(&tokens, syn.TOKEN{syn.TOKEN_TYPE.OPERATOR, syn.OPERATOR_TYPE.PLUS})
		case '-':
			append(&tokens, syn.TOKEN{syn.TOKEN_TYPE.OPERATOR, syn.OPERATOR_TYPE.MINUS})
		case '*':
			append(&tokens, syn.TOKEN{syn.TOKEN_TYPE.OPERATOR, syn.OPERATOR_TYPE.MULTIPLY})
		case '/':
			append(&tokens, syn.TOKEN{syn.TOKEN_TYPE.OPERATOR, syn.OPERATOR_TYPE.DIVIDE})
		case '(':
			append(&tokens, syn.TOKEN{syn.TOKEN_TYPE.OPEN_PARENTHESIS, -1})
		case ')':
			append(&tokens, syn.TOKEN{syn.TOKEN_TYPE.CLOSE_PARENTHESIS, -1})
		case:
			// Any other token (identifiers, strings, ...) is silently skipped.
		}
	}

	return tokens[:]
}
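
/*
	Minimal usage sketch (the input expression here is illustrative only,
	not part of this package):

		src := "1 + 2 * (3 - 4)"
		tokens := lex(src)
		defer delete(tokens)
		print_ast(tokens)
*/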