Compare commits

...

1 commit
master...sly

Author: Sven Heidemann
SHA1: 93e54c984d
Message: [DevState] testing with rply
Date: 2020-09-27 18:20:12 +02:00

15 changed files with 179 additions and 889 deletions

View File

@@ -1,16 +0,0 @@
use test1 from Tests;
use test2 as test3 from Tests;

lib Main {
    class Program {
        func Main(args: list): void {
            var test_a: test1 = test1();
            test_a.dec_vars();
            test_a.is_error();
            if (!error) {
                var test_b: test3 = test3();
                test_b.continue();
            }
        }
    }
}

View File

@@ -1,35 +0,0 @@
lib Tests
{
    /*
        declaration of some tests
    */
    public class test1
    {
        public var test_string: string = 'Hello';
        public var test_string_2: string = "Hello World";
        public var test_num: num = 1;
        public var test_num_2: num = 1.0;
        public var test_num_3: num = this.test_num + this.test_num_2;

        public func dec_vars(): void
        {
            var test_bool: bool = true;
            var test_bool_2: bool = false;
            var test_bool_3: bool = test_bool != test_bool_2; # true
        }

        public func is_error(): bool
        {
            if (error != empty)
            {
                output(error.code + ' ' + error.message);
                return true;
            }
            else
            {
                output('continue');
                return false;
            }
        }
    }
}

View File

@@ -1,12 +0,0 @@
lib Tests {
    public class test2 {
        var string_a = strings();

        public func continue(): void {
            input(string_a.string1 + ': ');
        }
    }

    class strings {
        public var string1 = "hello world";
    }
}

View File

@@ -1,31 +0,0 @@
from typing import Optional

from CCLang_sly.Parser import Parser
from Interpreter.Validator import Validator
from CCLang_sly.Lexer import Lexer
from Interpreter.Repo import Repo
from Interpreter.Utils import Utils
from Models.AbstractSyntaxTree.AbstractSyntaxTree import AbstractSyntaxTree


class Interpreter:
    def __init__(self, repo: Repo, utils: Utils) -> None:
        self.__repo = repo
        self.__utils = utils
        # self.__lexer = Lexer(repo, utils)
        # self.__parser = Parser(repo, utils)
        # self.__validator = Validator(repo, utils)

    def interpret(self, line_str: str) -> None:
        """
        Interprets code line
        :param line_str:
        :return:
        """
        lexer = Lexer()
        parser = Parser()
        env = {}

        ast = parser.parse(lexer.tokenize(line_str))
        if ast is not None:
            print(ast)
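
The deleted CCLang_sly entry point above wires sly's lexer and parser together inside interpret(). A minimal sketch of the same wiring as a standalone REPL, assuming the CCLang_sly.Lexer and CCLang_sly.Parser classes shown further down in this diff:

    from CCLang_sly.Lexer import Lexer
    from CCLang_sly.Parser import Parser

    lexer = Lexer()
    parser = Parser()

    # sly: tokenize() yields the token stream that parse() consumes, one line at a time
    while True:
        ast = parser.parse(lexer.tokenize(input('> ')))
        if ast is not None:
            print(ast)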

View File

@@ -1,108 +0,0 @@
from sly import Lexer as SlyLexer

from Models.CCLang.TokenDefinition import TokenDefinition


class Lexer(SlyLexer):
    # Ignored pattern
    ignore = '\t '
    # ignore_comment = r'(/\*([^*]|[\r\n]|(\*+([^*/]|[\r\n])))*\*+/)|(//.*)|([#].*)'
    ignore_comment = r'([#].*|(//.*))'
    ignore_newline = r'\n+'

    tokens = {
        LIBRARY,
        CLASS,
        FUNCTION,
        VARIABLE,
        USE,
        FROM,
        OUTPUT,
        INPUT,
        LENGTH,
        RANGE,
        EXIT,
        IF,
        ELSEIF,
        ELSE,
        CONTINUE,
        IN,
        RETURN,
        WHILE,
        FOR,
        PUBLIC,
        THIS,
        LBRACE,
        RBRACE,
        LPARAN,
        RPARAN,
        LBRACKET,
        RBRACKET,
        SEMICOLON,
        COLON,
        COMMA,
        POINT,
        PLUS,
        MINUS,
        ASTERIK,
        SLASH,
        EQUAL,
        CARET,
        TRUE,
        FALSE,
        STRING,
        NUMBER,
        EMPTY,
        NAME
    }

    # token definition
    LIBRARY = TokenDefinition.Library.value
    CLASS = TokenDefinition.Class.value
    FUNCTION = TokenDefinition.Function.value
    VARIABLE = TokenDefinition.Variable.value
    USE = TokenDefinition.Use.value
    FROM = TokenDefinition.From.value
    OUTPUT = TokenDefinition.Output.value
    INPUT = TokenDefinition.Input.value
    LENGTH = TokenDefinition.Length.value
    RANGE = TokenDefinition.Range.value
    EXIT = TokenDefinition.Exit.value
    IF = TokenDefinition.If.value
    ELSEIF = TokenDefinition.ElseIf.value
    ELSE = TokenDefinition.Else.value
    CONTINUE = TokenDefinition.Continue.value
    IN = TokenDefinition.In.value
    RETURN = TokenDefinition.Return.value
    WHILE = TokenDefinition.While.value
    FOR = TokenDefinition.For.value
    PUBLIC = TokenDefinition.Public.value
    THIS = TokenDefinition.This.value
    LBRACE = TokenDefinition.LeftBrace.value
    RBRACE = TokenDefinition.RightBrace.value
    LPARAN = TokenDefinition.LeftParenthesis.value
    RPARAN = TokenDefinition.RightParenthesis.value
    LBRACKET = TokenDefinition.LeftBracket.value
    RBRACKET = TokenDefinition.RightBracket.value
    SEMICOLON = TokenDefinition.Semicolon.value
    COLON = TokenDefinition.Colon.value
    COMMA = TokenDefinition.Comma.value
    POINT = TokenDefinition.Point.value
    PLUS = TokenDefinition.Plus.value
    MINUS = TokenDefinition.Minus.value
    ASTERIK = TokenDefinition.Asterisk.value
    SLASH = TokenDefinition.Slash.value
    EQUAL = TokenDefinition.Equal.value
    CARET = TokenDefinition.Caret.value
    TRUE = TokenDefinition.BoolTrue.value
    FALSE = TokenDefinition.BoolFalse.value
    STRING = TokenDefinition.String.value
    NUMBER = TokenDefinition.Number.value
    EMPTY = TokenDefinition.Empty.value
    NAME = TokenDefinition.Name.value

    def error(self, t):
        print("Illegal character '%s'" % t.value[0])
        # self.index += 1
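
A hedged smoke test for the sly-based Lexer above (it assumes the TokenDefinition enum maps each token name to a matching regex or literal pattern):

    from CCLang_sly.Lexer import Lexer

    lexer = Lexer()
    # sly tokens expose .type and .value
    for tok in lexer.tokenize("var x: num = 1 + 2;"):
        print(tok.type, repr(tok.value))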

View File

@@ -1,61 +0,0 @@
from sly import Parser as SlyParser

from CCLang_sly.Lexer import Lexer


class Parser(SlyParser):
    tokens = Lexer.tokens

    # precedence = (
    #     ('left', '+', '-'),
    #     ('left', '*', '/'),
    #     ('right', 'UMINUS'),
    # )

    def __init__(self):
        self.env = {}

    @_('')
    def statement(self, p):
        pass

    def error(self, p):
        print(f'ERROR: {p}')
        pass

    # lib definition
    @_('PUBLIC LIBRARY NAME LBRACE')
    def statement(self, p):
        return ('lib_def', p.NAME, True)

    @_('LIBRARY NAME LBRACE')
    def statement(self, p):
        return ('lib_def', p.NAME, False)

    # class definition
    @_('PUBLIC CLASS NAME LBRACE')
    def statement(self, p):
        return ('class_def', p.NAME, True)

    @_('CLASS NAME LBRACE')
    def statement(self, p):
        return ('class_def', p.NAME, False)

    # func definition
    @_('PUBLIC FUNCTION NAME LPARAN statement RPARAN COLON type LBRACE')
    def statement(self, p):
        return ('func_def', p.NAME, True)

    @_('FUNCTION NAME LPARAN RPARAN COLON type LBRACE')
    def statement(self, p):
        return ('func_def', p.NAME, False)

    # types
    @_('EMPTY')
    def type(self, p):
        return ('type', p.EMPTY)

    # right brace
    @_('RBRACE')
    def statement(self, p):
        return ('end', p.RBRACE)
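
The grammar above only recognizes single declaration headers and closing braces, one line at a time. A sketch of how it would be driven, assuming the TokenDefinition patterns match these literals:

    from CCLang_sly.Lexer import Lexer
    from CCLang_sly.Parser import Parser

    lexer = Lexer()
    parser = Parser()

    # Each line is parsed on its own and reduced to a tuple,
    # e.g. ('lib_def', 'Main', False) or ('end', '}').
    print(parser.parse(lexer.tokenize('lib Main {')))
    print(parser.parse(lexer.tokenize('}')))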

View File

@@ -1,22 +0,0 @@
# hi1
// hi2

public lib Main {
    class Program {
        var test: bool = false;
        var test2: Program2 = empty;
        var test3: Program2 = Test(34);

        func Main(): empty {
        #func Main {
            var hallo: any;
            #output('Hello');
            #output(this.isTrue(false));
        }

        func isTrue(value: bool): bool {
        #func isTrue {
            #return value;
        }
    }
}

View File

@@ -0,0 +1,59 @@
from rply.token import BaseBox


class AbstractSyntaxTree:

    class BinaryOp(BaseBox):
        def __init__(self, left, right):
            self.left = left
            self.right = right

    class Number(BaseBox):
        def __init__(self, value):
            self.value = value

        def eval(self):
            return self.value

    class String(BaseBox):
        def __init__(self, value):
            self.value = value

        def eval(self):
            return self.value

    class Bool(BaseBox):
        def __init__(self, value):
            self.value = value

        def eval(self):
            return self.value

    class Add(BinaryOp):
        def eval(self):
            return self.left.eval() + self.right.eval()

    class Sub(BinaryOp):
        def eval(self):
            return self.left.eval() - self.right.eval()

    class Mul(BinaryOp):
        def eval(self):
            return self.left.eval() * self.right.eval()

    class Div(BinaryOp):
        def eval(self):
            return self.left.eval() / self.right.eval()

    class Output(BaseBox):
        def __init__(self, value):
            self.value = value

        def eval(self):
            print('>>', self.value.eval())

    class Input(BaseBox):
        def __init__(self, value):
            self.value = value

        def eval(self):
            return input(f'<< {self.value.eval()}')
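
These node classes evaluate themselves and can be exercised without the lexer or parser; a small sketch (the module path is assumed, since the new file's location is not shown in this diff):

    from Interpreter.AbstractSyntaxTree import AbstractSyntaxTree  # path assumed

    nine = AbstractSyntaxTree.Add(
        AbstractSyntaxTree.Add(AbstractSyntaxTree.Number(3), AbstractSyntaxTree.Number(3)),
        AbstractSyntaxTree.Number(3))

    AbstractSyntaxTree.Output(nine).eval()                                # prints: >> 9
    AbstractSyntaxTree.Output(AbstractSyntaxTree.String('Hallo')).eval()  # prints: >> Hallo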

View File

@@ -13,8 +13,10 @@ class Interpreter:
     def __init__(self, repo: Repo, utils: Utils) -> None:
         self.__repo = repo
         self.__utils = utils
-        self.__lexer = Lexer(repo, utils)
-        self.__parser = Parser(repo, utils)
+        self.__lexer = Lexer().get_lexer()
+        parser = Parser()
+        parser.parse()
+        self.__parser = parser.get_parser()
         self.__validator = Validator(repo, utils)

     def interpret(self, line_str: str) -> None:
@@ -23,35 +25,9 @@ class Interpreter:
         :param line_str:
         :return:
         """
-        tokens = []
-        ast: Optional[AbstractSyntaxTree] = None
-
-        if self.__repo.error is None:
-            tokens = self.__lexer.tokenize(line_str)
-
-        if self.__repo.error is None:
-            ast = self.__parser.parse(tokens)
-
-        """
-        if ast is not None:
-            for lib in ast.libraries:
-                print('lib', lib.name)
-                for cl in lib.classes:
-                    print('class', cl.name)
-                    for var in cl.variables:
-                        print('cl var', var.name)
-                    for func in cl.functions:
-                        print('func', func.name)
-                        for arg in func.args:
-                            print('func arg', arg.name)
-                        for var in func.variables:
-                            print('func var', var.name)
-                        for ins in func.instructions:
-                            print('ins', ins)
-        """
-
-        # if self.__repo.is_error is None:
-        #     self.__validator.validate(self.__repo.AST)
+        tokens = self.__lexer.lex(line_str)
+        #for t in tokens:
+        #    print(t)
+        ast = self.__parser.parse(tokens)
+        ast.eval()
+        print('#####\n')

View File

@@ -1,171 +1,19 @@
-from typing import List
-
-from Interpreter.Repo import Repo
-from Interpreter.Utils import Utils
-from Models.Token.Token import Token
-from Models.Token.TokenTypes import TokenTypes, UnresolvedTokenTypes
-
-
-class Lexer:
-
-    def __init__(self, repo: Repo, utils: Utils) -> None:
-        self.__repo = repo
-        self.__utils = utils
-        self.__is_ml_comment = False
-
-    def __add_tok(self, tokens: List[Token], value: str, input_token_type: UnresolvedTokenTypes) -> None:
-        """
-        Creates token object
-        :param value:
-        :param input_token_type:
-        :return:
-        """
-        token_type: TokenTypes = TokenTypes.Empty
-
-        if value != '':
-            if input_token_type == UnresolvedTokenTypes.Word:
-                if value in self.__repo.keywords:
-                    token_type = TokenTypes.Keyword
-                elif value in self.__repo.datatypes:
-                    token_type = TokenTypes.Type
-                elif value in self.__repo.bool_values:
-                    token_type = TokenTypes.Bool
-                elif value == UnresolvedTokenTypes.Empty:
-                    token_type = TokenTypes.Empty
-                else:
-                    token_type = TokenTypes.Name
-            elif input_token_type == UnresolvedTokenTypes.Number:
-                token_type = TokenTypes.Number
-            elif input_token_type == UnresolvedTokenTypes.String:
-                token_type = TokenTypes.String
-            elif input_token_type == UnresolvedTokenTypes.Expression_Character:
-                token_type = TokenTypes.Expression_Character
-            elif input_token_type == UnresolvedTokenTypes.Bool_Expression_Character:
-                token_type = TokenTypes.Bool_Expression_Character
-            elif input_token_type == UnresolvedTokenTypes.Format_Character:
-                token_type = TokenTypes.Format_Character
-
-            tokens.append(Token(token_type, value))
-
-    def tokenize(self, line: str) -> List[Token]:
-        """
-        Creates token list from code line
-        :param line:
-        :return:
-        """
-        tokens: List[Token] = []
-        word = ''
-        ol_comment = False
-        is_string1 = False  # 'hello'
-        is_string2 = False  # "hello"
-        is_number = False
-        is_expr_char = False
-
-        for i in range(0, len(line)):
-            c = line[i]
-            # ignore comments and spaces
-            if not ol_comment and not self.__is_ml_comment:
-                # comment filtering
-                if c == '#' and not is_string1 and not is_string2:
-                    ol_comment = True
-                elif line[i - 1] == '/' and c == '/':
-                    ol_comment = True
-                elif line[i - 1] == '/' and c == '*':
-                    self.__is_ml_comment = True
-                    i += 2
-                # end of number
-                elif not c.isdigit() and c != '.' and is_number:
-                    self.__add_tok(tokens, word, UnresolvedTokenTypes.Number)
-                    local_tokens = self.tokenize(c)
-                    for local_token in local_tokens:
-                        tokens.append(local_token)
-                    word = ''
-                    is_number = False
-                # end of expression char
-                elif c not in self.__repo.expr_chars and is_expr_char:
-                    self.__add_tok(tokens, word, UnresolvedTokenTypes.Expression_Character)
-                    word = ''
-                    is_expr_char = False
-                # begin of is_string1
-                elif c == '\'' and not is_string1:
-                    is_string1 = True
-                    word = ''
-                # end of is_string1
-                elif c == '\'' and is_string1:
-                    is_string1 = False
-                    self.__add_tok(tokens, word, UnresolvedTokenTypes.String)
-                    word = ''
-                # begin of is_string2
-                elif c == '\"' and not is_string2:
-                    is_string2 = True
-                    word = ''
-                # end of is_string2
-                elif c == '\"' and is_string2:
-                    is_string2 = False
-                    self.__add_tok(tokens, word, UnresolvedTokenTypes.String)
-                    word = ''
-                # format char
-                elif c in self.__repo.format_chars:
-                    self.__add_tok(tokens, word, UnresolvedTokenTypes.Word)
-                    self.__add_tok(tokens, c, UnresolvedTokenTypes.Format_Character)
-                    word = ''
-                # begin of number
-                elif c.isdigit() and not is_number and word == '':
-                    word += c
-                    is_number = True
-                # continue number
-                elif (c.isdigit() or c == '.') and is_number:
-                    word += c
-                # begin expression char
-                elif c in self.__repo.expr_chars and not is_expr_char:
-                    word += c
-                    is_expr_char = True
-                # continue expression char
-                elif c in self.__repo.expr_chars and is_expr_char:
-                    word += c
-                # bool expression char
-                elif c in self.__repo.bool_expr_chars:
-                    self.__add_tok(tokens, word, UnresolvedTokenTypes.Word)
-                    self.__add_tok(tokens, c, UnresolvedTokenTypes.Bool_Expression_Character)
-                    word = ''
-                # end of word
-                elif c == ' ' and not is_string1 and not is_string2 or c == '\n':
-                    self.__add_tok(tokens, word, UnresolvedTokenTypes.Word)
-                    word = ''
-                else:
-                    word += c
-
-            if c == '\n' and ol_comment:
-                ol_comment = False
-
-            if line[i - 1] == '*' and c == '/':
-                self.__is_ml_comment = False
-
-        return tokens
+from rply import LexerGenerator
+
+from Models.CCLang.TokenDefinition import TokenDefinition
+
+
+class Lexer:
+
+    def __init__(self):
+        self.lexer = LexerGenerator()
+
+    def _add_tokens(self):
+        for t in TokenDefinition:
+            self.lexer.add(t.name, t.value)
+        # Ignore spaces
+        self.lexer.ignore('\s+')
+
+    def get_lexer(self):
+        self._add_tokens()
+        return self.lexer.build()
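
A hedged sketch of driving the new rply lexer on its own (module path assumed; it relies on each TokenDefinition member carrying a name and a regex pattern):

    from Interpreter.Lexer import Lexer  # path assumed

    lexer = Lexer().get_lexer()
    # rply tokens expose gettokentype() and value
    for token in lexer.lex('output(3+3);'):
        print(token.gettokentype(), token.value)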

View File

@@ -1,415 +1,106 @@
-from typing import List, Optional, Union
-
-from Interpreter.Repo import Repo
-from Interpreter.Utils import Utils
-from Models.AbstractSyntaxTree.AbstractSyntaxTree import LibraryDefinitionNode, ClassDefinitionNode, AbstractSyntaxTree, \
-    FunctionDefinitionNode, VariableDefinitionNode, CallDefinitionNode, ValueNode, ASTElement
-from Models.Interpreter.Datatypes import Datatypes
-from Models.Interpreter.Error import Error, ErrorCodes
-from Models.Token.Token import Token
-from Models.Token.TokenTypes import TokenTypes
-from Models.Token.TokenValueTypes import Keywords, FormatCharacters, ExpressionCharacters
-
-
-class Parser:
-
-    def __init__(self, repo: Repo, utils: Utils) -> None:
-        self.__repo = repo
-        self.__utils = utils
-
-        self.__ast: AbstractSyntaxTree = AbstractSyntaxTree()
-        self.__saved_tokens: List[Token] = []
-        self.__expected_tokens: List[Token] = []
-
-        # for validation if type is created
-        self.__is_saving_type = False
-        self.__is_saving_value = False
-        self.__is_saving_call = False
-        self.__is_end = False
-
-        self.__saved_ast_elements: List[ASTElement] = []
-
-    def parse(self, tokens: List[Token]) -> AbstractSyntaxTree:
-        self.__is_end = False
-
-        if len(tokens) > 0:
-            toks = []
-            for tok in tokens:
-                toks.append({tok.type.name, tok.value})
-
-            print(self.__repo.line_number, toks)
-
-        for i in range(0, len(tokens)):
-            token = tokens[i]
-
-            self.__check_for_expected_tokens(token)
-
-            # keywords
-            if token.type == TokenTypes.Keyword:
-                if token.value == Keywords.Public.value:
-                    self.__saved_tokens.append(token)
-                    print('save public')
-                    self.__add_expected_token(TokenTypes.Keyword, Keywords.Library.value)
-                    self.__add_expected_token(TokenTypes.Keyword, Keywords.Class.value)
-                    self.__add_expected_token(TokenTypes.Keyword, Keywords.Function.value)
-                    self.__add_expected_token(TokenTypes.Keyword, Keywords.Variable.value)
-
-                elif token.value == Keywords.Library.value:
-                    self.__saved_tokens.append(token)
-                    print('save lib')
-                    self.__add_expected_token(TokenTypes.Name)
-
-                elif token.value == Keywords.Class.value:
-                    self.__saved_tokens.append(token)
-                    print('save class')
-                    self.__add_expected_token(TokenTypes.Name)
-
-                elif token.value == Keywords.Function.value:
-                    self.__saved_tokens.append(token)
-                    print('save function')
-                    self.__add_expected_token(TokenTypes.Name)
-
-                elif token.value == Keywords.Variable.value:
-                    self.__saved_tokens.append(token)
-                    print('save var')
-                    self.__add_expected_token(TokenTypes.Name)
-
-            # names
-            elif token.type == TokenTypes.Name:
-                if self.__is_saving_variable() and self.__is_saving_value:
-                    self.__save_name(token)
-                    if self.__is_saving_variable():
-                        self.__is_saving_call = True
-                # names could be variable types, validation check in evaluator
-                elif self.__is_saving_variable() and self.__is_saving_type:
-                    self.__save_type(token)
-                    self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Left_Parenthesis.value)
-                elif self.__is_saving_variable():
-                    self.__save_name(token)
-                    self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Colon.value)
-                elif self.__is_saving_function():
-                    self.__save_name(token)
-                    self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Left_Parenthesis.value)
-                elif self.__is_saving_library() or self.__is_saving_class():
-                    self.__save_name(token)
-                    self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Left_Brace.value)
-
-            # format chars
-            elif token.type == TokenTypes.Format_Character:
-                if token.value == FormatCharacters.Left_Brace.value:
-                    if self.__is_saving_library():
-                        self.__save_library()
-                    elif self.__is_saving_class():
-                        self.__save_class()
-                elif token.value == FormatCharacters.Colon.value:
-                    if self.__is_saving_variable():
-                        self.__is_saving_type = True
-                        self.__add_expected_token(TokenTypes.Type)
-                        self.__add_expected_token(TokenTypes.Name)
-                elif token.value == FormatCharacters.Semicolon.value:
-                    self.__end_line()
-                elif token.value == FormatCharacters.Left_Parenthesis.value:
-                    self.__add_expected_token(TokenTypes.Name)
-                    self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Right_Parenthesis.value)
-                    self.__add_expected_token(TokenTypes.Keyword, Keywords.This.value)
-                    self.__add_expected_token(TokenTypes.Name)
-                    self.__add_expected_token(TokenTypes.Empty)
-                    self.__add_expected_token(TokenTypes.Number)
-                    self.__add_expected_token(TokenTypes.String)
-                    self.__add_expected_token(TokenTypes.Bool)
-
-                    if self.__is_saving_call:
-                        self.__saved_tokens.append(token)
-                elif token.value == FormatCharacters.Right_Parenthesis.value:
-                    self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Semicolon.value)
-                    self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Colon.value)
-
-                    if self.__is_saving_call:
-                        self.__saved_tokens.append(token)
-                        self.__save_call()
-                        self.__is_saving_call = False
-
-            # expr chars
-            elif token.type == TokenTypes.Expression_Character:
-                if token.value == ExpressionCharacters.Equal.value:
-                    self.__is_saving_value = True
-                    self.__add_expected_token(TokenTypes.Bool)
-                    self.__add_expected_token(TokenTypes.Number)
-                    self.__add_expected_token(TokenTypes.String)
-                    self.__add_expected_token(TokenTypes.Empty)
-                    self.__add_expected_token(TokenTypes.Name)
-                    self.__add_expected_token(TokenTypes.Keyword, Keywords.This.value)
-                    self.__add_expected_token(TokenTypes.Keyword, Keywords.Input.value)
-                    self.__add_expected_token(TokenTypes.Keyword, Keywords.Range.value)
-                    self.__add_expected_token(TokenTypes.Keyword, Keywords.Length.value)
-                    self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Left_Parenthesis.value)
-                    self.__add_expected_token(TokenTypes.Type, Datatypes.Empty.value)
-
-            # types
-            elif token.type == TokenTypes.Type:
-                if self.__is_saving_variable() and self.__is_saving_value:
-                    self.__save_value(token)
-                elif self.__is_saving_variable():
-                    self.__save_type(token)
-
-            # values
-            elif token.type == TokenTypes.Bool or token.type == TokenTypes.Number or token.type == TokenTypes.String:
-                if self.__is_saving_call:
-                    self.__saved_tokens.append(token)
-                elif self.__is_saving_value:
-                    self.__save_value(token)
-
-        return self.__ast
-
-    """
-    Utils
-    """
-
-    def __save_name(self, token: Token):
-        self.__saved_tokens.append(token)
-        print('save name')
-
-    def __add_expected_token(self, token_type: TokenTypes, value: str = ''):
-        self.__expected_tokens.append(Token(token_type, value))
-
-    def __check_for_expected_tokens(self, token: Token):
-        error_token: Optional[Token] = None
-        is_error = True
-
-        for expected_token in self.__expected_tokens:
-            if self.__is_end or token.type != expected_token.type or expected_token.value != '' and token.value != expected_token.value:
-                error_token = token
-            else:
-                is_error = False
-
-        if error_token is not None and is_error:
-            self.__utils.error(Error(ErrorCodes.Unexpected, f'{error_token.type.name} {error_token.value}'))
-
-        self.__expected_tokens = []
-
-    def __end_line(self):
-        if self.__is_saving_variable():
-            self.__save_variable()
-
-        self.__is_end = True
-        self.__expected_tokens = []
-        self.__is_saving_type = False
-        self.__is_saving_value = False
-
-    """
-    Library
-    """
-
-    def __is_saving_library(self) -> bool:
-        found = False
-
-        for token in self.__saved_tokens:
-            if token.type == TokenTypes.Keyword and token.value == Keywords.Library.value:
-                found = True
-
-        return found
-
-    def __save_library(self) -> None:
-        is_public = False
-        name_token: Optional[Token] = None
-
-        for token in self.__saved_tokens:
-            if token.type == TokenTypes.Name:
-                name_token = token
-            elif token.type == TokenTypes.Keyword and token.value == Keywords.Public.value:
-                is_public = True
-
-        if name_token is not None:
-            self.__ast.libraries.append(LibraryDefinitionNode(is_public, name_token.value))
-            self.__saved_tokens = []
-            self.__add_expected_token(TokenTypes.Keyword, Keywords.Public.value)
-            self.__add_expected_token(TokenTypes.Keyword, Keywords.Class.value)
-            print('saved library')
-
-    """
-    Class
-    """
-
-    def __is_saving_class(self) -> bool:
-        found = False
-
-        for token in self.__saved_tokens:
-            if token.type == TokenTypes.Keyword and token.value == Keywords.Class.value:
-                found = True
-
-        return found
-
-    def __save_class(self) -> None:
-        is_public = False
-        name_token: Optional[Token] = None
-
-        for token in self.__saved_tokens:
-            if token.type == TokenTypes.Name:
-                name_token = token
-            elif token.type == TokenTypes.Keyword and token.value == Keywords.Public.value:
-                is_public = True
-
-        if name_token is not None:
-            self.__ast.libraries[len(self.__ast.libraries) - 1].classes.append(ClassDefinitionNode(is_public, name_token.value))
-            self.__saved_tokens = []
-            self.__add_expected_token(TokenTypes.Keyword, Keywords.Public.value)
-            self.__add_expected_token(TokenTypes.Keyword, Keywords.Variable.value)
-            self.__add_expected_token(TokenTypes.Keyword, Keywords.Function.value)
-            print('saved class')
-
-    """
-    Call
-    """
-
-    def __save_call(self) -> None:
-        name_token: Optional[Token] = None
-        args: List[ValueNode] = []
-        remove_tokens = []
-        is_call = False
-
-        for i in range(0, len(self.__saved_tokens)):
-            token = self.__saved_tokens[i]
-            last_token: Optional[Token] = None
-
-            if i - 1 > 0:
-                last_token = self.__saved_tokens[i - 1]
-
-            if token.type == TokenTypes.Name and last_token is not None and last_token.type == TokenTypes.Format_Character and last_token.value == FormatCharacters.Left_Parenthesis.value:
-                pass
-
-            if token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Left_Parenthesis.value and last_token is not None and last_token.type == TokenTypes.Name:
-                name_token = last_token
-                remove_tokens.append(last_token)
-                remove_tokens.append(token)
-                is_call = True
-            elif is_call and token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Right_Parenthesis.value:
-                remove_tokens.append(token)
-            elif is_call and token.type == TokenTypes.Bool or token.type == TokenTypes.Number or token.type == TokenTypes.String:
-                args.append(ValueNode(token.value, Datatypes[token.type.name]))
-                remove_tokens.append(token)
-
-        if name_token is not None:
-            call = CallDefinitionNode(name_token.value)
-            call.args = args
-            self.__saved_ast_elements.append(call)
-
-            for token in remove_tokens:
-                self.__saved_tokens.remove(token)
-
-            self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Semicolon.value)
-            print('saved call')
-
-    """
-    Function
-    """
-
-    def __is_saving_function(self) -> bool:
-        found = False
-
-        for token in self.__saved_tokens:
-            if token.type == TokenTypes.Keyword and token.value == Keywords.Function.value:
-                found = True
-
-        return found
-
-    def __save_function(self) -> None:
-        is_public = False
-        name_token: Optional[Token] = None
-        return_type: Optional[Token] = None
-
-        for token in self.__saved_tokens:
-            if token.type == TokenTypes.Name:
-                name_token = token
-            elif token.type == TokenTypes.Keyword and token.value == Keywords.Public.value:
-                is_public = True
-            elif token.type == TokenTypes.Type:
-                return_type = token
-
-        if name_token is not None and return_type is not None:
-            self.__ast.libraries[len(self.__ast.libraries) - 1].classes[len(self.__ast.libraries) - 1].functions.append(
-                FunctionDefinitionNode(is_public, name_token.value, Datatypes[return_type.value]))
-            self.__saved_tokens = []
-            print('saved function')
-
-    """
-    Variable
-    """
-
-    def __is_saving_variable(self) -> bool:
-        found = False
-
-        for token in self.__saved_tokens:
-            if token.type == TokenTypes.Keyword and token.value == Keywords.Variable.value:
-                found = True
-
-        return found
-
-    def __save_type(self, token: Token) -> None:
-        self.__saved_tokens.append(token)
-        self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Semicolon.value)
-        self.__add_expected_token(TokenTypes.Expression_Character, ExpressionCharacters.Equal.value)
-        self.__is_saving_type = False
-        print('save type')
-
-    def __save_variable(self):
-        is_public = False
-        name_token: Optional[Token] = None
-        datatype: Optional[Token] = None
-        value: Optional[Union[str, Token, CallDefinitionNode]] = None
-        reset_saved_ast = False
-
-        for token in self.__saved_tokens:
-            if token.type == TokenTypes.Name and name_token is None:
-                name_token = token
-            elif token.type == TokenTypes.Keyword and token.value == Keywords.Public.value:
-                is_public = True
-            elif token.type == TokenTypes.Type or name_token is not None and token.type == TokenTypes.Name:
-                datatype = token
-                value = Token(TokenTypes.Empty, TokenTypes.Empty.value)
-
-        for saved_ast_element in self.__saved_ast_elements:
-            if isinstance(saved_ast_element, CallDefinitionNode):
-                value = saved_ast_element
-                reset_saved_ast = True
-
-        if reset_saved_ast:
-            self.__saved_ast_elements = []
-
-        if name_token is not None and datatype is not None and value is not None:
-            if not isinstance(value, CallDefinitionNode):
-                value = value.value
-
-            if datatype.type == TokenTypes.Name:
-                variable = VariableDefinitionNode(is_public, name_token.value, datatype.value, value)
-            else:
-                variable = VariableDefinitionNode(is_public, name_token.value, Datatypes[str(datatype.value).capitalize()], value)
-
-            if len(self.__ast.libraries) > 0:
-                lib = self.__ast.libraries[len(self.__ast.libraries) - 1]
-                if len(lib.classes) > 0:
-                    cl = lib.classes[len(lib.classes) - 1]
-                    if len(cl.functions) == 0:
-                        cl.variables.append(variable)
-                    else:
-                        cl.functions[len(cl.functions) - 1].variables.append(variable)
-
-            self.__saved_tokens = []
-            print('saved variable')
-
-    """
-    Value
-    """
-
-    def __save_value(self, token: Token):
-        self.__saved_tokens.append(token)
-        self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Left_Parenthesis.value)
-        self.__add_expected_token(TokenTypes.Format_Character, FormatCharacters.Semicolon.value)
-        self.__is_saving_value = False
-        print('save value')
+from rply import ParserGenerator
+
+from Interpreter.AbstractSyntaxTree import AbstractSyntaxTree
+from Models.CCLang.TokenDefinition import TokenDefinition
+
+
+class Parser:
+
+    def __init__(self):
+        tokens = []
+        for t in TokenDefinition:
+            tokens.append(t.name)
+
+        self.pg = ParserGenerator(
+            # A list of all token names accepted by the parser.
+            [
+                TokenDefinition.ValueNumber.name,
+                TokenDefinition.Plus.name,
+                TokenDefinition.Minus.name,
+                TokenDefinition.Asterisk.name,
+                TokenDefinition.Slash.name,
+                TokenDefinition.LeftParenthesis.name,
+                TokenDefinition.RightParenthesis.name,
+                TokenDefinition.Output.name,
+                TokenDefinition.Semicolon.name,
+                TokenDefinition.ValueString.name,
+                TokenDefinition.BoolTrue.name,
+                TokenDefinition.BoolFalse.name,
+                TokenDefinition.Input.name
+            ],
+            precedence=[
+                ('left', ['Plus', 'Minus']),
+                ('left', ['Slash', 'Asterisk'])
+            ]
+        )
+
+    def parse(self):
+        #@self.pg.production('input : Input LeftParenthesis expression RightParenthesis Semicolon')
+        #def statement(p):
+        #    return AbstractSyntaxTree.Input(p[2])
+
+        @self.pg.production('builtin : Output LeftParenthesis expression RightParenthesis Semicolon')
+        @self.pg.production('builtin : Input LeftParenthesis expression RightParenthesis Semicolon')
+        def builtin(p):
+            if p[0].gettokentype() == 'Output':
+                return AbstractSyntaxTree.Output(p[2])
+            elif p[0].gettokentype() == 'Input':
+                return AbstractSyntaxTree.Input(p[2])
+
+        @self.pg.production('expression : ValueString')
+        def expression(p):
+            try:
+                string = p[0].value
+                if '\"' in p[0].value:
+                    string = str(p[0].value).replace('\"', '')
+                elif '\'' in p[0].value:
+                    string = str(p[0].value).replace('\'', '')
+
+                return AbstractSyntaxTree.String(string)
+            except Exception as e:
+                print(e)
+                raise ValueError(p)
+
+        @self.pg.production('expression : BoolTrue')
+        @self.pg.production('expression : BoolFalse')
+        def expression(p):
+            try:
+                return AbstractSyntaxTree.Bool(p[0].value)
+            except Exception as e:
+                print(e)
+                raise ValueError(p)
+
+        @self.pg.production('expression : ValueNumber')
+        def expression(p):
+            try:
+                return AbstractSyntaxTree.Number(int(p[0].value))
+            except Exception as e:
+                print(e)
+                raise ValueError(p)
+
+        @self.pg.production('expression : LeftParenthesis expression RightParenthesis')
+        def expression(p):
+            return p[1]
+
+        @self.pg.production('expression : expression Plus expression')
+        @self.pg.production('expression : expression Minus expression')
+        @self.pg.production('expression : expression Asterisk expression')
+        @self.pg.production('expression : expression Slash expression')
+        def expression(p):
+            left = p[0]
+            right = p[2]
+            if p[1].gettokentype() == 'Plus':
+                return AbstractSyntaxTree.Add(left, right)
+            elif p[1].gettokentype() == 'Minus':
+                return AbstractSyntaxTree.Sub(left, right)
+            elif p[1].gettokentype() == 'Asterisk':
+                return AbstractSyntaxTree.Mul(left, right)
+            elif p[1].gettokentype() == 'Slash':
+                return AbstractSyntaxTree.Div(left, right)
+            else:
+                raise AssertionError('Oops, this should not be possible!')
+
+        @self.pg.error
+        def error_handle(token):
+            raise ValueError(token)
+
+    def get_parser(self):
+        return self.pg.build()
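
End to end, the rply pieces compose the same way the updated Interpreter above does: build the lexer, register the grammar with parse(), build the parser, then eval() the resulting node. A minimal sketch with assumed module paths:

    from Interpreter.Lexer import Lexer    # paths assumed
    from Interpreter.Parser import Parser

    lexer = Lexer().get_lexer()
    parser = Parser()
    parser.parse()                  # registers the productions on the ParserGenerator
    runner = parser.get_parser()

    runner.parse(lexer.lex('output(3+3+3);')).eval()    # expected to print: >> 9
    runner.parse(lexer.lex('output("Hallo");')).eval()  # expected to print: >> Hallo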

View File

@@ -1,5 +1,4 @@
 from Interpreter.Interpreter import Interpreter
-from CCLang_sly.Interpreter import Interpreter as SlyCCLangInterpreter
 from Interpreter.Utils import Utils
 from Interpreter.Repo import Repo
@@ -10,4 +9,3 @@ class ServiceInitializer:
         self.repo = Repo()
         self.utils = Utils(self.repo)
         self.interpreter = Interpreter(self.repo, self.utils)
-        self.sly_cclang_interpreter = SlyCCLangInterpreter(self.repo, self.utils)

View File

@@ -12,7 +12,6 @@ class Main:
         self.__utils = self.__services.utils
         self.__repo = self.__services.repo
         self.__interpreter = self.__services.interpreter
-        self.__sly_cclang_interpreter = self.__services.sly_cclang_interpreter

     def console(self) -> None:
         """
@@ -22,8 +21,7 @@ class Main:
         i = 0
         while self.__repo.error is None:
             self.__repo.line_number = i + 1
-            #self.__interpreter.interpret(input('> '))
-            self.__sly_cclang_interpreter.interpret(input('> '))
+            self.__interpreter.interpret(input('> '))
             i += 1

     def files(self, file: str) -> None:
@@ -43,8 +41,7 @@ class Main:
         f = open(file, 'r', encoding='utf-8').readlines()
         for i in range(0, len(f)):
             self.__repo.line_number = i + 1
-            # self.__interpreter.interpret(f[i])
-            self.__sly_cclang_interpreter.interpret(f[i])
+            self.__interpreter.interpret(f[i])


 if __name__ == '__main__':

src/test.cc (new file, 6 additions)
View File

@@ -0,0 +1,6 @@
output(3+3+3);
output(3);
output("Hallo");
output(false);
output(true);
output(input("Name: "));
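
Assuming the TokenDefinition patterns match these literals, the first five lines of src/test.cc should evaluate to something like:

    >> 9
    >> 3
    >> Hallo
    >> false
    >> true

The last line nests input(...) inside output(...); the grammar above only derives Input at the builtin level, not as an expression, so as shown that line would most likely end up in error_handle.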