Added functionality to handle use directives

This commit is contained in:
Sven Heidemann 2021-10-28 21:04:01 +02:00
parent ef2d615e7c
commit e7f9ce4451
3 changed files with 53 additions and 3 deletions
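For orientation, here is a minimal standalone sketch of the use-directive token shapes documented in the new _parse_use method below (use Program; becomes <use> <name> <end>, use Program.Test; becomes <use> <name> <.> <name> <end>). This is illustrative only: split_use_directive and is_use_directive are hypothetical helpers, not the project's lexer or parser API.

# Illustrative sketch only -- not part of this commit. It mirrors the token
# shapes documented in _parse_use below; the helpers are hypothetical
# stand-ins, not the project's own types.
import re
from typing import List

def split_use_directive(line: str) -> List[str]:
    """Split a line such as 'use Program.Test;' into rough tokens."""
    return re.findall(r"[A-Za-z_]\w*|[.;]", line)

def is_use_directive(tokens: List[str]) -> bool:
    """Check the pattern <use> <name> (<.> <name>)* <end>."""
    if len(tokens) < 3 or tokens[0] != "use" or tokens[-1] != ";":
        return False
    names = tokens[1:-1]
    for pos, tok in enumerate(names):
        if pos % 2 == 0 and not tok.isidentifier():  # even slots must be names
            return False
        if pos % 2 == 1 and tok != ".":              # odd slots must be dots
            return False
    return len(names) % 2 == 1                       # sequence must end on a name

print(is_use_directive(split_use_directive("use Program;")))       # True
print(is_use_directive(split_use_directive("use Program.Test;")))  # True
print(is_use_directive(split_use_directive("use Program.Test")))   # False: missing ';'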

View File

@@ -64,6 +64,8 @@ class Application(ApplicationABC):
         self._runtime.file = file
+        if self._runtime.file != '':
+            Console.write_line('FILE:', self._runtime.file)
         f = open(file, 'r', encoding='utf-8').readlines()
         for i in range(0, len(f)):
             self._runtime.line_count = i + 1

View File

@@ -12,4 +12,4 @@ class ASTTypesEnum(Enum):
     FuncDeclaration = 'func_declaration'
     VariableDeclaration = 'variable_declaration'
     VariableValue = 'variable_value'
+    UseDirective = 'use_directive'

View File

@@ -150,7 +150,7 @@ class ParserService(ParserABC):
                 break
             elif i == tokens.count()-1:
-                self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Left_Brace.value))
+                self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Semicolon.value))
             elif i == 0 and token.type == TokenTypes.Keyword and token.value in self._access_keywords:
                 ast.append(AST(ASTTypesEnum.Access, token.value, self._runtime.line_count, self._runtime.line_count))
@@ -200,10 +200,58 @@ class ParserService(ParserABC):
         return AST(ASTTypesEnum.VariableDeclaration, ast, self._runtime.line_count, self._runtime.line_count)
 
+    def _parse_use(self, tokens: List[Token]) -> AST:
+        """ Parses use imports
+
+        Args:
+            tokens (List[Token]): Tokens from lexer
+
+        AST:
+            use Program;
+            <use> <name> <end>
+            use Program.Test;
+            <use> <name> <.> <name> <end>
+
+        Returns:
+            AST: Use directive AST
+        """
+        ast = List(AST)
+        i = 0
+        # for i in range(0, tokens.count()):
+        while i < tokens.count():
+            token: Token = tokens[i]
+            if i == tokens.count()-1 and token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Semicolon.value:
+                break
+            elif i == tokens.count()-1:
+                self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Semicolon.value))
+            elif i == 0 and token.type == TokenTypes.Keyword and token.value == Keywords.Use.value:
+                self._ast.append(AST(ASTTypesEnum.Keyword, token.value))
+            elif i == 1 and token.type == TokenTypes.Name:
+                Console.write_line('NAME')
+                name, last_token = self._parse_name(tokens.skip(i))
+                if last_token is not None:
+                    i = tokens.index(last_token)
+                ast.append(name)
+            else:
+                Console.write_line('TEST')
+                self._runtime.error(Error(ErrorCodesEnum.Unexpected, token.value))
+            i += 1
+
+        return AST(ASTTypesEnum.UseDirective, ast, self._runtime.line_count, self._runtime.line_count)
+
     def create_ast(self, tokens: List[Token]) -> List[AST]:
         self._ast = List(AST)
 
-        if tokens.where(lambda t: t.type == TokenTypes.Keyword and t.value == Keywords.Library.value).count() > 0:
+        if tokens.where(lambda t: t.type == TokenTypes.Keyword and t.value == Keywords.Use.value).count() > 0:
+            self._ast.append(self._parse_use(tokens))
+        elif tokens.where(lambda t: t.type == TokenTypes.Keyword and t.value == Keywords.Library.value).count() > 0:
             self._ast.append(self._parse_library_or_class(tokens))
         elif tokens.where(lambda t: t.type == TokenTypes.Keyword and t.value == Keywords.Class.value).count() > 0:
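
As a usage note, the sketch below shows the dispatch order create_ast follows after this change: a token stream containing the use keyword is routed to _parse_use before the existing Library and Class checks run. This is a stand-in sketch with placeholder keyword strings and names, not the project's classes or actual keyword values.

# Stand-in sketch of the new if/elif ordering in create_ast; the function name
# and the literal keyword strings are placeholders, not project API.
from typing import List

def dispatch(token_values: List[str]) -> str:
    # The first matching top-level keyword decides which parser runs.
    if "use" in token_values:
        return "_parse_use"
    elif "library" in token_values:
        return "_parse_library_or_class"
    elif "class" in token_values:
        return "_parse_library_or_class"
    return "<no top-level keyword found>"

print(dispatch(["use", "Program", ";"]))            # _parse_use
print(dispatch(["library", "Program", "{", "}"]))   # _parse_library_or_class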