Compare commits

24 Commits
sly ... master

Author SHA1 Message Date
257a190ffe Added logic to handle constructor for classes 2021-10-29 12:18:26 +02:00
4aaffca9cf Refactored imports 2021-10-28 22:33:20 +02:00
6511d3d577 Removed debug Console.write_line() 2021-10-28 22:31:43 +02:00
2a65a66a86 Added functionality to handle functions 2021-10-28 22:29:00 +02:00
e7f9ce4451 Added functionality to handle use directives 2021-10-28 21:04:01 +02:00
ef2d615e7c Removed code workspace 2021-10-28 19:05:13 +02:00
e5ab18167a Added vs code workspace 2021-10-28 19:00:48 +02:00
e6895992c2 Improved parser to handle variables 2021-10-28 15:11:13 +02:00
b11ec9003e Improved parser to handle names and classes 2021-10-27 21:03:46 +02:00
ff025bdc7f Removed old src 2021-10-27 20:17:22 +02:00
8f01e49479 Changed vs code color 2021-10-27 20:13:24 +02:00
2b02341336 Added logic to handle lib & class declaration 2021-10-27 18:30:22 +02:00
793ca62ddd Merge pull request 'cpl-rewrite' (#1) from cpl-rewrite into master
Reviewed-on: edraft/sh_cclang#1
2021-10-27 09:22:25 +02:00
f6642f834b Renamed .cc to .ccl 2021-10-27 09:21:40 +02:00
ad10bc0357 Added .vscode 2021-10-27 09:19:05 +02:00
4af133ab21 Added Parser 2021-10-27 09:18:57 +02:00
1f6d13551b Added CPL 2021.10.0 support 2021-10-26 23:32:08 +02:00
1197863296 Moved language design classes 2021-08-14 16:44:51 +02:00
62f8e86409 Improved language design 2021-08-14 16:43:48 +02:00
c909ce31d8 Added lexer 2021-08-14 16:41:25 +02:00
4a3f9b4b9d Added model files 2021-08-02 16:02:29 +02:00
a97de91fc4 Improved workspace structure 2021-08-02 15:58:05 +02:00
1577c78c51 Added CPL workspace 2021-08-02 14:55:10 +02:00
9c6034f7ae Added CPL workspace 2021-08-02 14:55:04 +02:00
88 changed files with 1586 additions and 811 deletions

3
.gitignore vendored
View File

@@ -1,2 +1,3 @@
*.pyc
/.idea/
venv

59
.vscode/launch.json vendored Normal file
View File

@@ -0,0 +1,59 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: Aktuelle Datei",
"type": "python",
"request": "launch",
"program": "${file}",
"console": "integratedTerminal",
"env": {
"PYTHONPATH": "${workspaceFolder}/src/:${workspaceFolder}/venv/bin/python"
}
},
{
"name": "CCL",
"type": "python",
"request": "launch",
"cwd": "/home/sven/Nextcloud_Sven/Schreibtisch/git_sh-edraft_de/sh_cclang/src/",
"program": "cc_lang_interpreter/main.py",
"console": "integratedTerminal",
"args": [
"First.ccl"
],
"env": {
"PYTHONPATH": "${workspaceFolder}/src/:${workspaceFolder}/venv/bin/python"
}
},
{
"name": "CCL: File",
"type": "python",
"request": "launch",
"cwd": "/home/sven/Nextcloud_Sven/Schreibtisch/git_sh-edraft_de/sh_cclang/src/",
"program": "cc_lang_interpreter/main.py",
"console": "integratedTerminal",
"args": [
"-p",
"../cc_code_preview"
],
"env": {
"PYTHONPATH": "${workspaceFolder}/src/:${workspaceFolder}/venv/bin/python"
}
},
{
"name": "CCL: Console",
"type": "python",
"request": "launch",
"cwd": "/home/sven/Nextcloud_Sven/Schreibtisch/git_sh-edraft_de/sh_cclang/src/",
"program": "cc_lang_interpreter/main.py",
"console": "integratedTerminal",
"args": [],
"env": {
"PYTHONPATH": "${workspaceFolder}/src/:${workspaceFolder}/venv/bin/python"
}
}
]
}

25
.vscode/settings.json vendored Normal file
View File

@@ -0,0 +1,25 @@
{
"workbench.colorCustomizations": {
"activityBar.activeBackground": "#93e6fc",
"activityBar.activeBorder": "#fa45d4",
"activityBar.background": "#93e6fc",
"activityBar.foreground": "#15202b",
"activityBar.inactiveForeground": "#15202b99",
"activityBarBadge.background": "#fa45d4",
"activityBarBadge.foreground": "#15202b",
"editorGroup.border": "#93e6fc",
"panel.border": "#93e6fc",
"sash.hoverBorder": "#93e6fc",
"sideBar.border": "#93e6fc",
"statusBar.background": "#61dafb",
"statusBar.foreground": "#15202b",
"statusBarItem.hoverBackground": "#2fcefa",
"statusBarItem.remoteBackground": "#61dafb",
"statusBarItem.remoteForeground": "#15202b",
"titleBar.activeBackground": "#61dafb",
"titleBar.activeForeground": "#15202b",
"titleBar.inactiveBackground": "#61dafb99",
"titleBar.inactiveForeground": "#15202b99"
},
"peacock.color": "#61dafb"
}

15
appsettings.json Normal file
View File

@@ -0,0 +1,15 @@
{
"TimeFormatSettings": {
"DateFormat": "%Y-%m-%d",
"TimeFormat": "%H:%M:%S",
"DateTimeFormat": "%Y-%m-%d %H:%M:%S.%f",
"DateTimeLogFormat": "%Y-%m-%d_%H-%M-%S"
},
"LoggingSettings": {
"Path": "logs/",
"Filename": "log_$start_time.log",
"ConsoleLogLevel": "ERROR",
"FileLogLevel": "WARN"
}
}

View File

@@ -0,0 +1,13 @@
public lib Preview.Classes {
public class Test {
private var _name: string;
constructor(name: string) {
this._name = name;
}
public func getName(): string {
return this._name;
}
}
}

View File

@@ -0,0 +1,9 @@
public lib Preview.Functions {
public func isTrue(value: bool): bool {
return value;
}
public func add(value1: number, value2: number): number {
return value1 + value2;
}
}

View File

@@ -0,0 +1,6 @@
public lib Preview.Variables {
public var str: string = "Hello World";
public var test: bool = false;
public var test2: Test = empty;
public var test3: Test = Test(34);
}

View File

@@ -0,0 +1,26 @@
use Preview.Variables;
use Preview.Functions;
use Preview.Classes;
public lib Main {
public class Program {
private var test: Test;
constructor() {
this.test = Test(str);
}
public func Main(): empty {
output(str, test, test2, test3);
output(isTrue(test));
output(this.test.getName());
}
private func testForEach(): empty {
var loopList = [];
loopList.forEach(e => {
output(e);
});
}
}
}

12
cpl-workspace.json Normal file
View File

@@ -0,0 +1,12 @@
{
"WorkspaceSettings": {
"DefaultProject": "cc-lang-interpreter",
"Projects": {
"cc-lang-interpreter": "src/cc_lang_interpreter/cc-lang-interpreter.json",
"cc-lang": "src/cc_lang/cc-lang.json",
"parser": "src/parser/parser.json",
"lexer": "src/lexer/lexer.json",
"runtime": "src/runtime/runtime.json"
}
}
}

16
doc/target/main.bl Normal file
View File

@@ -0,0 +1,16 @@
use test1 from Tests;
use test2 as test3 from Tests;
lib Main {
class Program {
func Main(args: list): void {
var test_a: test1 = test1();
test_a.dec_vars();
test_a.is_error();
if (!error) {
var test_b: test3 = test3();
test3.continue();
}
}
}
}

35
doc/target/test.bl Normal file
View File

@@ -0,0 +1,35 @@
lib Tests
{
/*
declaration of some tests
*/
public class test1
{
public var test_string: string = 'Hello';
public var test_string_2: string = "Hello World";
public var test_num: num = 1;
public var test_num_2: num = 1.0;
public var test_num_3: num = this.test_num + this.test_num_2;
public func dec_vars(): void
{
var test_bool: bool = true;
var test_bool_2: bool = false;
var test_bool_3: bool = test_bool != test_bool_2; # true
}
public func is_error(): bool
{
if (error != empty)
{
output(error.code + ' ' + error.message);
return true;
}
else
{
output('continue');
return false;
}
}
}
}

12
doc/target/test2.bl Normal file
View File

@@ -0,0 +1,12 @@
lib Tests {
public class test2 {
var string_a = strings();
public func continue(): void {
input(string_a.string1 + ': ');
}
}
class strings {
public var string1 = "hello world";
}
}

View File

@@ -1,223 +0,0 @@
from sly import Lexer
from sly import Parser
class BasicLexer(Lexer):
tokens = {NAME, NUMBER, STRING, IF, THEN, ELSE, FOR, FUN, TO, ARROW, EQEQ}
ignore = '\t '
literals = {'=', '+', '-', '/', '*', '(', ')', ',', ';'}
# Define tokens
IF = r'IF'
THEN = r'THEN'
ELSE = r'ELSE'
FOR = r'FOR'
FUN = r'FUN'
TO = r'TO'
ARROW = r'->'
NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
STRING = r'\".*?\"'
EQEQ = r'=='
@_(r'\d+')
def NUMBER(self, t):
t.value = int(t.value)
return t
@_(r'#.*')
def COMMENT(self, t):
pass
@_(r'\n+')
def newline(self, t):
self.lineno = t.value.count('\n')
class BasicParser(Parser):
tokens = BasicLexer.tokens
precedence = (
('left', '+', '-'),
('left', '*', '/'),
('right', 'UMINUS'),
)
def __init__(self):
self.env = {}
@_('')
def statement(self, p):
pass
@_('FOR var_assign TO expr THEN statement')
def statement(self, p):
return ('for_loop', ('for_loop_setup', p.var_assign, p.expr), p.statement)
@_('IF condition THEN statement ELSE statement')
def statement(self, p):
return ('if_stmt', p.condition, ('branch', p.statement0, p.statement1))
@_('FUN NAME "(" ")" ARROW statement')
def statement(self, p):
return ('fun_def', p.NAME, p.statement)
@_('NAME "(" ")"')
def statement(self, p):
return ('fun_call', p.NAME)
@_('expr EQEQ expr')
def condition(self, p):
return ('condition_eqeq', p.expr0, p.expr1)
@_('var_assign')
def statement(self, p):
return p.var_assign
@_('NAME "=" expr')
def var_assign(self, p):
return ('var_assign', p.NAME, p.expr)
@_('NAME "=" STRING')
def var_assign(self, p):
return ('var_assign', p.NAME, p.STRING)
@_('expr')
def statement(self, p):
return (p.expr)
@_('expr "+" expr')
def expr(self, p):
return ('add', p.expr0, p.expr1)
@_('expr "-" expr')
def expr(self, p):
return ('sub', p.expr0, p.expr1)
@_('expr "*" expr')
def expr(self, p):
return ('mul', p.expr0, p.expr1)
@_('expr "/" expr')
def expr(self, p):
return ('div', p.expr0, p.expr1)
@_('"-" expr %prec UMINUS')
def expr(self, p):
return p.expr
@_('NAME')
def expr(self, p):
return ('var', p.NAME)
@_('NUMBER')
def expr(self, p):
return ('num', p.NUMBER)
class BasicExecute:
def __init__(self, tree, env):
self.env = env
result = self.walkTree(tree)
if result is not None and isinstance(result, int):
print(result)
if isinstance(result, str) and result[0] == '"':
print(result)
def walkTree(self, node):
if isinstance(node, int):
return node
if isinstance(node, str):
return node
if node is None:
return None
if node[0] == 'program':
if node[1] == None:
self.walkTree(node[2])
else:
self.walkTree(node[1])
self.walkTree(node[2])
if node[0] == 'num':
return node[1]
if node[0] == 'str':
return node[1]
if node[0] == 'if_stmt':
result = self.walkTree(node[1])
if result:
return self.walkTree(node[2][1])
return self.walkTree(node[2][2])
if node[0] == 'condition_eqeq':
return self.walkTree(node[1]) == self.walkTree(node[2])
if node[0] == 'fun_def':
self.env[node[1]] = node[2]
if node[0] == 'fun_call':
try:
return self.walkTree(self.env[node[1]])
except LookupError:
print("Undefined function '%s'" % node[1])
return 0
if node[0] == 'add':
return self.walkTree(node[1]) + self.walkTree(node[2])
elif node[0] == 'sub':
return self.walkTree(node[1]) - self.walkTree(node[2])
elif node[0] == 'mul':
return self.walkTree(node[1]) * self.walkTree(node[2])
elif node[0] == 'div':
return self.walkTree(node[1]) / self.walkTree(node[2])
if node[0] == 'var_assign':
self.env[node[1]] = self.walkTree(node[2])
return node[1]
if node[0] == 'var':
try:
return self.env[node[1]]
except LookupError:
print("Undefined variable '" + node[1] + "' found!")
return 0
if node[0] == 'for_loop':
if node[1][0] == 'for_loop_setup':
loop_setup = self.walkTree(node[1])
loop_count = self.env[loop_setup[0]]
loop_limit = loop_setup[1]
for i in range(loop_count + 1, loop_limit + 1):
res = self.walkTree(node[2])
if res is not None:
print(res)
self.env[loop_setup[0]] = i
del self.env[loop_setup[0]]
if node[0] == 'for_loop_setup':
return (self.walkTree(node[1]), self.walkTree(node[2]))
if __name__ == '__main__':
lexer = BasicLexer()
parser = BasicParser()
env = {}
while True:
try:
text = input('basic > ')
except EOFError:
break
if text:
tokens = lexer.tokenize(text)
tree = parser.parse(tokens)
for t in tree:
print(t)
# BasicExecute(tree, env)

View File

@@ -1,59 +0,0 @@
from rply.token import BaseBox
class AbstractSyntaxTree:
class BinaryOp(BaseBox):
def __init__(self, left, right):
self.left = left
self.right = right
class Number(BaseBox):
def __init__(self, value):
self.value = value
def eval(self):
return self.value
class String(BaseBox):
def __init__(self, value):
self.value = value
def eval(self):
return self.value
class Bool(BaseBox):
def __init__(self, value):
self.value = value
def eval(self):
return self.value
class Add(BinaryOp):
def eval(self):
return self.left.eval() + self.right.eval()
class Sub(BinaryOp):
def eval(self):
return self.left.eval() - self.right.eval()
class Mul(BinaryOp):
def eval(self):
return self.left.eval() * self.right.eval()
class Div(BinaryOp):
def eval(self):
return self.left.eval() / self.right.eval()
class Output(BaseBox):
def __init__(self, value):
self.value = value
def eval(self):
print('>>', self.value.eval())
class Input(BaseBox):
def __init__(self, value):
self.value = value
def eval(self):
return input(f'<< {self.value.eval()}')

View File

@@ -1,33 +0,0 @@
from typing import Optional
from Interpreter.Validator import Validator
from Interpreter.Lexer import Lexer
from Interpreter.Parser import Parser
from Interpreter.Repo import Repo
from Interpreter.Utils import Utils
from Models.AbstractSyntaxTree.AbstractSyntaxTree import AbstractSyntaxTree
class Interpreter:
def __init__(self, repo: Repo, utils: Utils) -> None:
self.__repo = repo
self.__utils = utils
self.__lexer = Lexer().get_lexer()
parser = Parser()
parser.parse()
self.__parser = parser.get_parser()
self.__validator = Validator(repo, utils)
def interpret(self, line_str: str) -> None:
"""
Interprets code line
:param line_str:
:return:
"""
tokens = self.__lexer.lex(line_str)
#for t in tokens:
# print(t)
ast = self.__parser.parse(tokens)
ast.eval()

View File

@@ -1,19 +0,0 @@
from rply import LexerGenerator
from Models.CCLang.TokenDefinition import TokenDefinition
class Lexer:
def __init__(self):
self.lexer = LexerGenerator()
def _add_tokens(self):
for t in TokenDefinition:
self.lexer.add(t.name, t.value)
# Ignore spaces
self.lexer.ignore('\s+')
def get_lexer(self):
self._add_tokens()
return self.lexer.build()

View File

@@ -1,106 +0,0 @@
from rply import ParserGenerator
from Interpreter.AbstractSyntaxTree import AbstractSyntaxTree
from Models.CCLang.TokenDefinition import TokenDefinition
class Parser:
def __init__(self):
tokens = []
for t in TokenDefinition:
tokens.append(t.name)
self.pg = ParserGenerator(
# A list of all token names accepted by the parser.
[
TokenDefinition.ValueNumber.name,
TokenDefinition.Plus.name,
TokenDefinition.Minus.name,
TokenDefinition.Asterisk.name,
TokenDefinition.Slash.name,
TokenDefinition.LeftParenthesis.name,
TokenDefinition.RightParenthesis.name,
TokenDefinition.Output.name,
TokenDefinition.Semicolon.name,
TokenDefinition.ValueString.name,
TokenDefinition.BoolTrue.name,
TokenDefinition.BoolFalse.name,
TokenDefinition.Input.name
],
precedence=[
('left', ['Plus', 'Minus']),
('left', ['Slash', 'Asterisk'])
]
)
def parse(self):
#@self.pg.production('input : Input LeftParenthesis expression RightParenthesis Semicolon')
#def statement(p):
# return AbstractSyntaxTree.Input(p[2])
@self.pg.production('builtin : Output LeftParenthesis expression RightParenthesis Semicolon')
@self.pg.production('builtin : Input LeftParenthesis expression RightParenthesis Semicolon')
def builtin(p):
if p[0].gettokentype() == 'Output':
return AbstractSyntaxTree.Output(p[2])
elif p[0].gettokentype() == 'Input':
return AbstractSyntaxTree.Input(p[2])
@self.pg.production('expression : ValueString')
def expression(p):
try:
string = p[0].value
if '\"' in p[0].value:
string = str(p[0].value).replace('\"', '')
elif '\'' in p[0].value:
string = str(p[0].value).replace('\'', '')
return AbstractSyntaxTree.String(string)
except Exception as e:
print(e)
raise ValueError(p)
@self.pg.production('expression : BoolTrue')
@self.pg.production('expression : BoolFalse')
def expression(p):
try:
return AbstractSyntaxTree.Bool(p[0].value)
except Exception as e:
print(e)
raise ValueError(p)
@self.pg.production('expression : ValueNumber')
def expression(p):
try:
return AbstractSyntaxTree.Number(int(p[0].value))
except Exception as e:
print(e)
raise ValueError(p)
@self.pg.production('expression : LeftParenthesis expression RightParenthesis')
def expression(p):
return p[1]
@self.pg.production('expression : expression Plus expression')
@self.pg.production('expression : expression Minus expression')
@self.pg.production('expression : expression Asterisk expression')
@self.pg.production('expression : expression Slash expression')
def expression(p):
left = p[0]
right = p[2]
if p[1].gettokentype() == 'Plus':
return AbstractSyntaxTree.Add(left, right)
elif p[1].gettokentype() == 'Minus':
return AbstractSyntaxTree.Sub(left, right)
elif p[1].gettokentype() == 'Asterisk':
return AbstractSyntaxTree.Mul(left, right)
elif p[1].gettokentype() == 'Slash':
return AbstractSyntaxTree.Div(left, right)
else:
raise AssertionError('Oops, this should not be possible!')
@self.pg.error
def error_handle(token):
raise ValueError(token)
def get_parser(self):
return self.pg.build()

View File

@@ -1,73 +0,0 @@
from Models.Interpreter.Datatypes import Datatypes
from Models.Token.TokenValueTypes import FormatCharacters, Booleans, Keywords, ExpressionCharacters
class Repo:
def __init__(self) -> None:
self.debug = True
self.line_number: int = 0
# interpreter
self.keywords = [
# define keywords
Keywords.Library.value,
Keywords.Class.value,
Keywords.Function.value,
Keywords.Variable.value,
Keywords.Use.value,
Keywords.From.value,
# builtin functions
Keywords.Output.value,
Keywords.Input.value,
Keywords.Length.value,
Keywords.Range.value,
Keywords.Exit.value,
# normal keywords
Keywords.If.value,
Keywords.ElseIf.value,
Keywords.Else.value,
Keywords.Continue.value,
Keywords.If.value,
Keywords.Return.value,
# loops
Keywords.While.value,
Keywords.For.value,
# access
Keywords.Public.value,
Keywords.This.value
]
self.datatypes = [
Datatypes.Empty.value,
Datatypes.Any.value,
Datatypes.Number.value,
Datatypes.String.value,
Datatypes.Bool.value,
Datatypes.List.value,
Datatypes.Dict.value
]
self.format_chars = [
FormatCharacters.Left_Brace.value,
FormatCharacters.Right_Brace.value,
FormatCharacters.Left_Parenthesis.value,
FormatCharacters.Right_Parenthesis.value,
FormatCharacters.Left_Bracket.value,
FormatCharacters.Right_Bracket.value,
FormatCharacters.Semicolon.value,
FormatCharacters.Colon.value,
FormatCharacters.Comma.value,
FormatCharacters.Point.value
]
self.expr_chars = [
ExpressionCharacters.Plus.value,
ExpressionCharacters.Minus.value,
ExpressionCharacters.Asterisk.value,
ExpressionCharacters.Slash.value,
ExpressionCharacters.Equal.value,
ExpressionCharacters.Caret.value
]
self.bool_expr_chars = ['<', '>', '!', '!=', '==', '>=', '<=', '&&', '||']
self.bool_values = [Booleans.Right.value, Booleans.Wrong.value]
# runtime
self.error = None

View File

@@ -1,26 +0,0 @@
from termcolor import colored
from Interpreter.Repo import Repo
from Models.Interpreter.Error import Error
class Utils:
def __init__(self, repo: Repo) -> None:
self.__repo = repo
def input(self, prefix: str) -> str:
return input(prefix)
def output(self, text: str) -> None:
print(f'-> {text}')
def error(self, error: Error) -> None:
self.__repo.error = error
print(colored(f'Error in line {self.__repo.line_number}\n{self.__repo.error.msg}', 'red'))
# exit()
def runtime_error(self, error: Error) -> None:
self.__repo.error = error
print(colored(f'{self.__repo.error.msg}', 'red'))
# exit()

View File

@@ -1,12 +0,0 @@
from Interpreter.Repo import Repo
from Interpreter.Utils import Utils
class Validator:
def __init__(self, repo: Repo, utils: Utils) -> None:
self.__repo = repo
self.__utils = utils
def validate(self, ast: []) -> None:
pass

View File

@@ -1,92 +0,0 @@
from typing import List, Union
from Models.Interpreter.Datatypes import Datatypes
from Models.Token.TokenValueTypes import ExpressionCharacters
class AbstractSyntaxTree:
def __init__(self):
self.libraries: List[LibraryDefinitionNode] = []
class ASTElement:
def __init__(self):
pass
class ValueNode(ASTElement):
def __init__(self, value: str, datatype: Datatypes):
super().__init__()
self.value = value
self.type = datatype
class BinaryOperationNode(ASTElement):
def __init__(self, left: str, op_token: str, right: str):
super().__init__()
self.left = left
self.op_token = op_token
self.right = right
self.operation_chars = [
ExpressionCharacters.Plus.value,
ExpressionCharacters.Minus.value,
ExpressionCharacters.Asterisk.value,
ExpressionCharacters.Slash.value,
ExpressionCharacters.Caret.value
]
def eval(self):
if self.op_token in self.operation_chars:
return eval(f'{self.left} {self.op_token} {self.right}')
class LibraryDefinitionNode(ASTElement):
def __init__(self, is_public: bool, name: str):
super().__init__()
self.is_public = is_public
self.name = name
self.classes: List[ClassDefinitionNode] = []
class ClassDefinitionNode(ASTElement):
def __init__(self, is_public: bool, name: str):
super().__init__()
self.is_public = is_public
self.name = name
self.variables: [VariableDefinitionNode] = []
self.functions: List[FunctionDefinitionNode] = []
class CallDefinitionNode(ASTElement):
def __init__(self, name: str):
super().__init__()
self.name = name
self.args: List[ValueNode] = []
class FunctionDefinitionNode(ASTElement):
def __init__(self, is_public: bool, name: str, return_type: Datatypes):
super().__init__()
self.is_public = is_public
self.name = name
self.args: List[VariableDefinitionNode] = []
self.return_type = return_type
self.variables: [VariableDefinitionNode] = []
self.instructions: List[ASTElement] = []
class VariableDefinitionNode(ASTElement):
def __init__(self, is_public: bool, name: str, datatype: Union[str, Datatypes], value: Union[str, CallDefinitionNode]):
super().__init__()
self.is_public = is_public
self.name = name
self.datatype = datatype
self.value = value

View File

@@ -1,77 +0,0 @@
from enum import Enum
class TokenDefinition(Enum):
""" Keywords """
# define keywords
Library = r'lib'
Class = r'class'
Function = r'func'
Variable = r'var'
Use = r'use'
From = r'from'
As = r'as'
# builtin functions
Output = r'output'
Input = r'input'
Length = r'length'
Range = r'range'
Round = r'round'
Exit = r'exit'
# normal keywords
If = r'if'
ElseIf = r'elseif'
Else = r'else'
Continue = r'continue'
Return = r'return'
# loops
While = r'while'
For = r'for'
In = r'in'
# access
Public = r'public'
This = r'this'
""" Chars """
# format
LeftBrace = r'\{'
RightBrace = r'\}'
LeftParenthesis = r'\('
RightParenthesis = r'\)'
LeftBracket = r'\['
RightBracket = r'\]'
Semicolon = r'\;'
Colon = r'\:'
Comma = r'\,'
Point = r'\.'
# expr
Plus = r'\+'
Minus = r'\-'
Asterisk = r'\*'
Slash = r'\/'
Equal = r'\='
Caret = r'\^'
""" Values """
ValueString = r'\".*?\"'
ValueNumber = r'\d+'
# bool
BoolTrue = r'true'
BoolFalse = r'false'
""" Datatypes """
Empty = r'empty'
Number = r'Number'
String = r'string'
Bool = r'bool'
List = r'list'
Dict = r'dict'
Void = r'void'
""" other """
Name = r'[a-zA-Z_][a-zA-Z0-9_]*'

View File

@@ -1,8 +0,0 @@
from Models.Token import TokenTypes
class Token:
def __init__(self, token_type: TokenTypes, value: str) -> None:
self.type: TokenTypes = token_type
self.value: str = value

View File

View File

@@ -1,11 +0,0 @@
from Interpreter.Interpreter import Interpreter
from Interpreter.Utils import Utils
from Interpreter.Repo import Repo
class ServiceInitializer:
def __init__(self) -> None:
self.repo = Repo()
self.utils = Utils(self.repo)
self.interpreter = Interpreter(self.repo, self.utils)

1
src/cc_lang/__init__.py Normal file
View File

@@ -0,0 +1 @@
# imports

44
src/cc_lang/cc-lang.json Normal file
View File

@@ -0,0 +1,44 @@
{
"ProjectSettings": {
"Name": "cc-lang",
"Version": {
"Major": "0",
"Minor": "0",
"Micro": "0"
},
"Author": "",
"AuthorEmail": "",
"Description": "",
"LongDescription": "",
"URL": "",
"CopyrightDate": "",
"CopyrightName": "",
"LicenseName": "",
"LicenseDescription": "",
"Dependencies": [
"sh_cpl-core==2021.10.0.post1",
"sh_cpl-query==2021.10.0.post1"
],
"PythonVersion": ">=3.9.2",
"PythonPath": {
"linux": ""
},
"Classifiers": []
},
"BuildSettings": {
"ProjectType": "library",
"SourcePath": "",
"OutputPath": "../../dist",
"Main": "",
"EntryPoint": "",
"IncludePackageData": false,
"Included": [],
"Excluded": [
"*/__pycache__",
"*/logs",
"*/tests"
],
"PackageData": {},
"ProjectReferences": []
}
}

View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1,4 @@
class Type:
def __init__(self):
pass

View File

@@ -0,0 +1,4 @@
class Class:
def __init__(self):
pass

View File

@@ -0,0 +1,4 @@
class Function:
def __init__(self):
pass

View File

@@ -0,0 +1,71 @@
from cc_lang.model.datatypes import Datatypes
from cc_lang.model.language_definition_classes import Keywords, FormatCharacters, ExpressionCharacters, Booleans
class LanguageDefinition:
# interpreter
keywords = [
# define keywords
Keywords.Library.value,
Keywords.Class.value,
Keywords.Function.value,
Keywords.Variable.value,
Keywords.Use.value,
Keywords.From.value,
Keywords.Constructor.value,
# builtin functions
Keywords.Output.value,
Keywords.Input.value,
Keywords.Length.value,
Keywords.Range.value,
Keywords.Exit.value,
Keywords.ForEach.value,
# normal keywords
Keywords.If.value,
Keywords.ElseIf.value,
Keywords.Else.value,
Keywords.Continue.value,
Keywords.If.value,
Keywords.Return.value,
# loops
Keywords.While.value,
Keywords.For.value,
Keywords.Foreach.value,
# access
Keywords.Public.value,
Keywords.Private.value,
Keywords.Static.value,
Keywords.This.value,
Keywords.Type.value
]
datatypes = [
Datatypes.Empty.value,
Datatypes.Any.value,
Datatypes.Number.value,
Datatypes.String.value,
Datatypes.Bool.value,
Datatypes.List.value,
Datatypes.Dict.value
]
format_chars = [
FormatCharacters.Left_Brace.value,
FormatCharacters.Right_Brace.value,
FormatCharacters.Left_Parenthesis.value,
FormatCharacters.Right_Parenthesis.value,
FormatCharacters.Left_Bracket.value,
FormatCharacters.Right_Bracket.value,
FormatCharacters.Semicolon.value,
FormatCharacters.Colon.value,
FormatCharacters.Comma.value,
FormatCharacters.Point.value
]
expr_chars = [
ExpressionCharacters.Plus.value,
ExpressionCharacters.Minus.value,
ExpressionCharacters.Asterisk.value,
ExpressionCharacters.Slash.value,
ExpressionCharacters.Equal.value,
ExpressionCharacters.Caret.value
]
bool_expr_chars = ['<', '>', '!', '!=', '==', '>=', '<=', '&&', '||']
bool_values = [Booleans.Right.value, Booleans.Wrong.value]

View File

@@ -9,6 +9,7 @@ class Keywords(Enum):
Variable = 'var'
Use = 'use'
From = 'from'
Constructor = 'constructor'
# builtin functions
Output = 'output'
@@ -16,6 +17,7 @@ class Keywords(Enum):
Length = 'length'
Range = 'range'
Exit = 'exit'
ForEach = 'forEach'
# normal keywords
If = 'if'
@@ -28,10 +30,14 @@ class Keywords(Enum):
# loops
While = 'while'
For = 'for'
Foreach = 'foreach'
# access
Public = 'public'
Private = 'private'
Static = 'static'
This = 'this'
Type = 'type'
class Booleans(Enum):

View File

@@ -0,0 +1,4 @@
class Library:
def __init__(self):
pass

View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1,4 @@
class Bool:
def __init__(self):
pass

View File

@@ -0,0 +1,4 @@
class Number:
def __init__(self):
pass

View File

@@ -0,0 +1,4 @@
class String:
def __init__(self):
pass

View File

@@ -0,0 +1,4 @@
class Variable:
def __init__(self):
pass

View File

@@ -0,0 +1 @@
# imports:

View File

@@ -0,0 +1,85 @@
import os
from cpl_core.application import ApplicationABC
from cpl_core.configuration import ConfigurationABC
from cpl_core.console import Console
from cpl_core.dependency_injection import ServiceProviderABC
from cpl_query.extension.list import List
from lexer.abc.lexer_abc import LexerABC
from lexer.model.token import Token
from parser.abc.ast import AST
from parser.abc.parser_abc import ParserABC
from runtime.abc.runtime_service_abc import RuntimeServiceABC
from runtime.model.error import Error
from runtime.model.error_codes_enum import ErrorCodesEnum
class Application(ApplicationABC):
def __init__(self, config: ConfigurationABC, services: ServiceProviderABC):
ApplicationABC.__init__(self, config, services)
self._lexer: LexerABC = services.get_service(LexerABC)
self._parser: ParserABC = services.get_service(ParserABC)
self._runtime: RuntimeServiceABC = services.get_service(RuntimeServiceABC)
self._path = config.get_configuration('p')
def _interpret(self, line: str):
tokens: List[Token] = self._lexer.tokenize(line)
ast: List[AST] = self._parser.create_ast(tokens)
line.replace("\n", "").replace("\t", "")
Console.write_line(f'<{self._runtime.line_count}> LINE: {line}')
# header, values = ['Type', 'Value'], []
# tokens.for_each(lambda t: values.append([t.type, t.value]))
# Console.table(header, values)
Console.write(ast, '\n')
def _console(self):
i = 0
while True:
self._runtime.line_count = i + 1
self._interpret(Console.read('> '))
i += 1
def _files(self):
if not os.path.isdir(self._path):
raise FileNotFoundError(self._path)
# r=root, d=directories, f=files
for r, d, f in os.walk(self._path):
for file in f:
if file.endswith('.ccl'):
self._read_file(os.path.join(r, file))
def _read_file(self, file: str):
if not os.path.isfile(file):
self._runtime.error(Error(ErrorCodesEnum.FileNotFound))
if not file.endswith('.ccl'):
self._runtime.error(Error(ErrorCodesEnum.WrongFileType))
self._runtime.file = file
if self._runtime.file != '':
Console.write_line('FILE:', self._runtime.file)
f = open(file, 'r', encoding='utf-8').readlines()
for i in range(0, len(f)):
self._runtime.line_count = i + 1
self._interpret(f[i])
self._runtime.file = ''
def configure(self): pass
def main(self):
if self._path is None:
self._console()
return
if os.path.isfile(self._path):
self._read_file(self._path)
else:
self._files()

View File

@@ -0,0 +1,44 @@
{
"ProjectSettings": {
"Name": "cc-lang-interpreter",
"Version": {
"Major": "0",
"Minor": "0",
"Micro": "0"
},
"Author": "",
"AuthorEmail": "",
"Description": "",
"LongDescription": "",
"URL": "",
"CopyrightDate": "",
"CopyrightName": "",
"LicenseName": "",
"LicenseDescription": "",
"Dependencies": [
"sh_cpl-core==2021.10.0.post1",
"sh_cpl-query==2021.10.0.post1"
],
"PythonVersion": ">=3.9.2",
"PythonPath": {
"linux": ""
},
"Classifiers": []
},
"BuildSettings": {
"ProjectType": "console",
"SourcePath": "",
"OutputPath": "../../dist",
"Main": "cc_lang_interpreter.main",
"EntryPoint": "cc-lang-interpreter",
"IncludePackageData": false,
"Included": [],
"Excluded": [
"*/__pycache__",
"*/logs",
"*/tests"
],
"PackageData": {},
"ProjectReferences": []
}
}

View File

@@ -0,0 +1,14 @@
from cpl_core.application import ApplicationBuilder
from cc_lang_interpreter.application import Application
from cc_lang_interpreter.startup import Startup
def main():
app_builder = ApplicationBuilder(Application)
app_builder.use_startup(Startup)
app_builder.build().run()
if __name__ == '__main__':
main()

View File

@@ -0,0 +1,30 @@
from cpl_core.application import StartupABC
from cpl_core.configuration import ConfigurationABC, ConsoleArgument
from cpl_core.dependency_injection import ServiceProviderABC, ServiceCollectionABC
from cpl_core.environment import ApplicationEnvironment
from lexer.abc.lexer_abc import LexerABC
from lexer.service.lexer_service import LexerService
from parser.abc.parser_abc import ParserABC
from parser.service.parser_service import ParserService
from runtime.abc.runtime_service_abc import RuntimeServiceABC
from runtime.service.runtime_service import RuntimeService
class Startup(StartupABC):
def __init__(self):
StartupABC.__init__(self)
def configure_configuration(self, config: ConfigurationABC, env: ApplicationEnvironment) -> ConfigurationABC:
config.add_console_argument(ConsoleArgument('-', 'p', [], ' ', is_value_token_optional=True))
config.add_console_arguments()
return config
def configure_services(self, services: ServiceCollectionABC, env: ApplicationEnvironment) -> ServiceProviderABC:
services.add_singleton(RuntimeServiceABC, RuntimeService)
services.add_singleton(LexerABC, LexerService)
services.add_singleton(ParserABC, ParserService)
return services.build_service_provider()

View File

@@ -1,53 +0,0 @@
import os
import sys
from Models.Interpreter.Error import Error, ErrorCodes
from ServiceInitializer import ServiceInitializer
class Main:
def __init__(self) -> None:
self.__services = ServiceInitializer()
self.__utils = self.__services.utils
self.__repo = self.__services.repo
self.__interpreter = self.__services.interpreter
def console(self) -> None:
"""
Getting code from console input
:return:
"""
i = 0
while self.__repo.error is None:
self.__repo.line_number = i + 1
self.__interpreter.interpret(input('> '))
i += 1
def files(self, file: str) -> None:
"""
Getting input from file
:param file:
:return:
"""
if not os.path.isfile(file):
self.__utils.runtime_error(Error(ErrorCodes.FileNotFound))
return
if not file.endswith('.cc'):
self.__utils.runtime_error(Error(ErrorCodes.WrongFileType))
return
f = open(file, 'r', encoding='utf-8').readlines()
for i in range(0, len(f)):
self.__repo.line_number = i + 1
self.__interpreter.interpret(f[i])
if __name__ == '__main__':
main = Main()
print(sys.argv)
if len(sys.argv) == 2:
main.files(sys.argv[1])
else:
main.console()

1
src/lexer/__init__.py Normal file
View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1,14 @@
from abc import ABC, abstractmethod
from cpl_query.extension.list import List
from lexer.model.token import Token
class LexerABC(ABC):
@abstractmethod
def __init__(self): pass
@abstractmethod
def tokenize(self, line: str) -> List[Token]: pass

44
src/lexer/lexer.json Normal file
View File

@@ -0,0 +1,44 @@
{
"ProjectSettings": {
"Name": "lexer",
"Version": {
"Major": "0",
"Minor": "0",
"Micro": "0"
},
"Author": "",
"AuthorEmail": "",
"Description": "",
"LongDescription": "",
"URL": "",
"CopyrightDate": "",
"CopyrightName": "",
"LicenseName": "",
"LicenseDescription": "",
"Dependencies": [
"sh_cpl-core==2021.10.0.post1",
"sh_cpl-query==2021.10.0.post1"
],
"PythonVersion": ">=3.9.2",
"PythonPath": {
"linux": ""
},
"Classifiers": []
},
"BuildSettings": {
"ProjectType": "library",
"SourcePath": "",
"OutputPath": "../../dist",
"Main": "",
"EntryPoint": "",
"IncludePackageData": false,
"Included": [],
"Excluded": [
"*/__pycache__",
"*/logs",
"*/tests"
],
"PackageData": {},
"ProjectReferences": []
}
}

26
src/lexer/model/token.py Normal file
View File

@@ -0,0 +1,26 @@
from lexer.model.token_types import TokenTypes
class Token:
def __init__(self, token_type: TokenTypes, value: str) -> None:
self._type: TokenTypes = token_type
self._value: str = value
@property
def type(self) -> TokenTypes:
return self._type
@property
def value(self) -> str:
return self._value
@value.setter
def value(self, value: str):
self._value = value
def __repr__(self) -> str:
return f'Token <Type: {self._type}> <Value: {self._value}>'
def __str__(self) -> str:
return f'Token: Type: {self._type}, Value: {self._value}'

View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1,166 @@
from cpl_query.extension.list import List
from cc_lang.model.language_definition import LanguageDefinition
from lexer.abc.lexer_abc import LexerABC
from lexer.model.token import Token
from lexer.model.token_types import UnresolvedTokenTypes, TokenTypes
from runtime.abc.runtime_service_abc import RuntimeServiceABC
class LexerService(LexerABC):
def __init__(self, runtime: RuntimeServiceABC):
self._runtime = runtime
self._is_ml_comment = False
def _add_tok(self, tokens: List[Token], value: str, input_token_type: UnresolvedTokenTypes) -> None:
"""
Creates token object
:param value:
:param input_token_type:
:return:
"""
token_type: TokenTypes = TokenTypes.Empty
if value != '':
if input_token_type == UnresolvedTokenTypes.Word:
if value in LanguageDefinition.keywords:
token_type = TokenTypes.Keyword
elif value in LanguageDefinition.datatypes:
token_type = TokenTypes.Type
elif value in LanguageDefinition.bool_values:
token_type = TokenTypes.Bool
elif value == UnresolvedTokenTypes.Empty:
token_type = TokenTypes.Empty
else:
token_type = TokenTypes.Name
elif input_token_type == UnresolvedTokenTypes.Number:
token_type = TokenTypes.Number
elif input_token_type == UnresolvedTokenTypes.String:
token_type = TokenTypes.String
elif input_token_type == UnresolvedTokenTypes.Expression_Character:
token_type = TokenTypes.Expression_Character
elif input_token_type == UnresolvedTokenTypes.Bool_Expression_Character:
token_type = TokenTypes.Bool_Expression_Character
elif input_token_type == UnresolvedTokenTypes.Format_Character:
token_type = TokenTypes.Format_Character
tokens.append(Token(token_type, value))
def tokenize(self, line: str) -> List[Token]:
tokens: List[Token] = List(Token)
word = ''
ol_comment = False
is_string1 = False # 'hello'
is_string2 = False # "hello"
is_number = False
is_expr_char = False
for i in range(0, len(line)):
c = line[i]
# ignore comments and spaces
if not ol_comment and not self._is_ml_comment:
# comment filtering
if c == '#' and not is_string1 and not is_string2:
ol_comment = True
elif line[i - 1] == '/' and c == '/':
ol_comment = True
elif line[i - 1] == '/' and c == '*':
self._is_ml_comment = True
i += 2
# end of number
elif not c.isdigit() and c != '.' and is_number:
self._add_tok(tokens, word, UnresolvedTokenTypes.Number)
local_tokens = self.tokenize(c)
for local_token in local_tokens:
tokens.append(local_token)
word = ''
is_number = False
# end of expression char
elif c not in LanguageDefinition.expr_chars and is_expr_char:
self._add_tok(tokens, word, UnresolvedTokenTypes.Expression_Character)
word = ''
is_expr_char = False
# begin of is_string1
elif c == '\'' and not is_string1:
is_string1 = True
word = ''
# end of is_string1
elif c == '\'' and is_string1:
is_string1 = False
self._add_tok(tokens, word, UnresolvedTokenTypes.String)
word = ''
# begin of is_string2
elif c == '\"' and not is_string2:
is_string2 = True
word = ''
# end of is_string2
elif c == '\"' and is_string2:
is_string2 = False
self._add_tok(tokens, word, UnresolvedTokenTypes.String)
word = ''
# format char
elif c in LanguageDefinition.format_chars:
self._add_tok(tokens, word, UnresolvedTokenTypes.Word)
self._add_tok(tokens, c, UnresolvedTokenTypes.Format_Character)
word = ''
# begin of number
elif c.isdigit() and not is_number and word == '':
word += c
is_number = True
# continue number
elif (c.isdigit() or c == '.') and is_number:
word += c
# begin expression char
elif c in LanguageDefinition.expr_chars and not is_expr_char:
word += c
is_expr_char = True
# continue expression char
elif c in LanguageDefinition.expr_chars and is_expr_char:
word += c
# bool expression char
elif c in LanguageDefinition.bool_expr_chars:
self._add_tok(tokens, word, UnresolvedTokenTypes.Word)
self._add_tok(tokens, c, UnresolvedTokenTypes.Bool_Expression_Character)
word = ''
# end of word
elif c == ' ' and not is_string1 and not is_string2 or c == '\n':
self._add_tok(tokens, word, UnresolvedTokenTypes.Word)
word = ''
else:
word += c
if c == '\n' and ol_comment:
ol_comment = False
if line[i - 1] == '*' and c == '/':
self._is_ml_comment = False
return tokens

1
src/parser/__init__.py Normal file
View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1 @@
# imports

34
src/parser/abc/ast.py Normal file
View File

@@ -0,0 +1,34 @@
from typing import Optional, Union
from cpl_query.extension.list import List
from parser.model.ast_types_enum import ASTTypesEnum
class AST():
def __init__(self, type: ASTTypesEnum, value: Union[str, List['AST']], start: Optional[int] = None, end: Optional[int] = None):
self._type = type
self._value = value
self._start = start
self._end = end
@property
def type(self) -> ASTTypesEnum:
return self._type
@property
def value(self) -> Union[str, List['AST']]:
return self._value
@property
def start(self) -> Optional[int]:
return self._start
@property
def end(self) -> Optional[int]:
return self._end
def __repr__(self) -> str:
return f'AST <Type: {self._type}> <Value: {self._value}> <{self._start},{self._end}>'
def __str__(self) -> str:
return f'AST <Type: {self._type}> <Value: {self._value}> <{self._start},{self._end}>'

View File

@@ -0,0 +1,14 @@
from abc import ABC, abstractmethod
from cpl_query.extension.list import List
from parser.abc.ast import AST
from lexer.model.token import Token
class ParserABC(ABC):
@abstractmethod
def __init__(self): pass
@abstractmethod
def create_ast(self, tokens: List[Token]) -> List[AST]: pass

View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1,17 @@
from enum import Enum
class ASTTypesEnum(Enum):
Access = 'access'
Keyword = 'keyword'
Name = 'name'
Type = 'type'
LibraryDeclaration = 'library_declaration'
ClassDeclaration = 'class_declaration'
FuncDeclaration = 'func_declaration'
VariableDeclaration = 'variable_declaration'
VariableValue = 'variable_value'
UseDirective = 'use_directive'
Arguments = 'arguments'
Constructor = 'constructor'

View File

@@ -0,0 +1,18 @@
from parser.abc.ast import AST
from cpl_query.extension.list import List
class ClassAST(AST):
def __init__(self):
AST.__init__(self)
self._body = List(AST)
@property
def body(self) -> List['AST']:
return self._body
@body.setter
def body(self, value: List['AST']):
self._body = value

View File

@@ -0,0 +1,18 @@
from parser.abc.ast import AST
from cpl_query.extension.list import List
class LibraryAST(AST):
def __init__(self):
AST.__init__(self)
self._body = List(AST)
@property
def body(self) -> List['AST']:
return self._body
@body.setter
def body(self, value: List['AST']):
self._body = value

View File

@@ -0,0 +1,8 @@
from enum import Enum
class ParserStateEnum(Enum):
Default = 0
Library = 1
Class = 2

44
src/parser/parser.json Normal file
View File

@@ -0,0 +1,44 @@
{
"ProjectSettings": {
"Name": "parser",
"Version": {
"Major": "0",
"Minor": "0",
"Micro": "0"
},
"Author": "",
"AuthorEmail": "",
"Description": "",
"LongDescription": "",
"URL": "",
"CopyrightDate": "",
"CopyrightName": "",
"LicenseName": "",
"LicenseDescription": "",
"Dependencies": [
"sh_cpl-core==2021.10.0.post1",
"sh_cpl-query==2021.10.0.post1"
],
"PythonVersion": ">=3.9.2",
"PythonPath": {
"linux": ""
},
"Classifiers": []
},
"BuildSettings": {
"ProjectType": "library",
"SourcePath": "",
"OutputPath": "../../dist",
"Main": "",
"EntryPoint": "",
"IncludePackageData": false,
"Included": [],
"Excluded": [
"*/__pycache__",
"*/logs",
"*/tests"
],
"PackageData": {},
"ProjectReferences": []
}
}

View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1,433 @@
from parser.abc.ast import AST
from parser.abc.parser_abc import ParserABC
from parser.model.ast_types_enum import ASTTypesEnum
from typing import Optional, Tuple
from cc_lang.model.language_definition_classes import (ExpressionCharacters,
FormatCharacters,
Keywords)
from cpl_core.console.console import Console
from cpl_query.extension.list import List
from lexer.model.token import Token
from lexer.model.token_types import TokenTypes
from runtime.abc.runtime_service_abc import RuntimeServiceABC
from runtime.model.error import Error
from runtime.model.error_codes_enum import ErrorCodesEnum
class ParserService(ParserABC):
def __init__(self, runtime: RuntimeServiceABC):
self._runtime = runtime
self._access_keywords = [
Keywords.Public.value,
Keywords.Private.value,
Keywords.Static.value,
Keywords.This.value,
]
def _parse_name(self, tokens: List[Token]) -> Tuple[AST, Token]:
""" Parses names
Args:
tokens (List[Token]): Tokens from lexer
AST:
Program
<name>
Program.Test
<name> <.> <name>
Returns:
Tuple[AST, Token]: Name as AST and last Token of Name
"""
name = ''
last_token: Optional[Token] = None
for i in range(0, tokens.count()):
token: Token = tokens[i]
if i == tokens.count() and token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Point.value:
self._runtime.error(Error(ErrorCodesEnum.Unexpected, FormatCharacters.Point.value))
if token.type == TokenTypes.Name or token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Point.value:
name += token.value
else:
break
last_token = token
return (AST(ASTTypesEnum.Name, name, self._runtime.line_count, self._runtime.line_count), last_token)
def _parse_args(self, tokens: List[Token]) -> Tuple[AST, Token]:
""" Parses args
Args:
tokens (List[Token]): Tokens from lexer
AST:
()
<start> <end>
(i: number)
<start> <name> <:> <type> <end>
(i: number, name: string)
<start> <name> <:> <type> <,> <name> <:> <type> <end>
Returns:
Tuple[AST, Token]: Arguments as AST and last token of the argument list
"""
if tokens.where(lambda t: t.type == TokenTypes.Format_Character and t.value == FormatCharacters.Left_Parenthesis.value).count() < 1:
self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Left_Parenthesis.value))
else:
i = 1
if tokens.where(lambda t: t.type == TokenTypes.Format_Character and t.value == FormatCharacters.Right_Parenthesis.value).count() < 1:
self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Right_Parenthesis.value))
ast = List(AST)
last_token: Optional[Token] = None
expected_var = False
# for i in range(0, tokens.count()):
while i < tokens.count():
token: Token = tokens[i]
if token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Right_Parenthesis.value:
last_token = token
break
if expected_var and token.type != TokenTypes.Name:
self._runtime.error(Error(ErrorCodesEnum.Unexpected, token.value))
if token.type == TokenTypes.Name:
if i + 1 >= tokens.count():
self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Colon.value))
if i + 2 >= tokens.count():
self._runtime.error(Error(ErrorCodesEnum.Expected, Keywords.Type.value))
var_ast = List(AST)
var_ast.append(AST(ASTTypesEnum.Name, token.value, self._runtime.line_count, self._runtime.line_count))
var_ast.append(AST(ASTTypesEnum.Type, tokens[i+2].value, self._runtime.line_count, self._runtime.line_count))
ast.append(AST(ASTTypesEnum.VariableDeclaration, var_ast, self._runtime.line_count, self._runtime.line_count))
i += 2
if token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Comma.value:
expected_var = True
last_token = token
i += 1
return (AST(ASTTypesEnum.Arguments, ast, self._runtime.line_count, self._runtime.line_count), last_token)
def _parse_library_or_class(self, tokens: List[Token], cls=False) -> AST:
""" Parses library or class declarations
Args:
tokens (List[Token]): Tokens from lexer
AST:
lib Main {
<lib, class> <name> <end>
public lib Main {
<access> <lib, class> <name> <end>
public lib Main {}
<access> <lib, class> <name> <end> <end>
public lib Main.Test {
<access> <lib, class> <name> <.> <name> <end> <end>
Returns:
AST: Library or class AST
"""
end = None
ast = List(AST)
i = 0
while i < tokens.count():
token: Token = tokens[i]
# if line contains }
if token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Right_Brace.value:
end = self._runtime.line_count
elif i == tokens.count()-1 and token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Left_Brace.value:
break
elif i == tokens.count()-1:
self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Left_Brace.value))
elif i == 0 and token.type == TokenTypes.Keyword and token.value in self._access_keywords:
ast.append(AST(ASTTypesEnum.Access, token.value, self._runtime.line_count, self._runtime.line_count))
elif i <= 1 and token.type == TokenTypes.Keyword and token.value == Keywords.Library.value and not cls:
ast.append(AST(ASTTypesEnum.Keyword, token.value, self._runtime.line_count, self._runtime.line_count))
elif i <= 1 and token.type == TokenTypes.Keyword and token.value == Keywords.Class.value and cls:
ast.append(AST(ASTTypesEnum.Keyword, token.value, self._runtime.line_count, self._runtime.line_count))
elif i >= 1 and token.type == TokenTypes.Name:
name, last_token = self._parse_name(tokens.skip(i))
if last_token is not None:
i = tokens.index(last_token)
ast.append(name)
else:
self._runtime.error(Error(ErrorCodesEnum.Unexpected, token.value))
i += 1
return AST(ASTTypesEnum.LibraryDeclaration if not cls else ASTTypesEnum.ClassDeclaration, ast, self._runtime.line_count, end)
def _parse_variable(self, tokens: List[Token]) -> AST:
""" Parses variable declarations
Args:
tokens (List[Token]): Tokens from lexer
AST:
var test: number;
<var> <name> <type> <end>
var test = 0;
<var> <name> <value | int, str, bool> <end>
var test: number = 0;
<var> <name> <type> <value | int, str, bool> <end>
var test: number = test;
<var> <name> <type> <name> <end>
var test: number = TestClass();
<var> <name> <type> <object-assign> <end>
private var test: number = 0;
<access> <var> <name> <type> <end>
Returns:
AST: Variable declaration AST
"""
end = None
ast = List(AST)
i = 0
# for i in range(0, tokens.count()):
while i < tokens.count():
token: Token = tokens[i]
if i == tokens.count()-1 and token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Semicolon.value:
break
elif i == tokens.count()-1:
self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Semicolon.value))
elif i == 0 and token.type == TokenTypes.Keyword and token.value in self._access_keywords:
ast.append(AST(ASTTypesEnum.Access, token.value, self._runtime.line_count, self._runtime.line_count))
elif i >= 0 and token.type == TokenTypes.Keyword and token.value == Keywords.Variable.value:
ast.append(AST(ASTTypesEnum.Keyword, token.value, self._runtime.line_count, self._runtime.line_count))
elif i >= 1 and token.type == TokenTypes.Name:
name, last_token = self._parse_name(tokens.skip(i))
i = tokens.index(last_token)
ast.append(name)
elif i >= 2 and token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Colon.value:
if i+1 < tokens.count():
next_token = tokens[i+1]
ast.append(AST(ASTTypesEnum.Type, next_token.value, self._runtime.line_count, self._runtime.line_count))
i += 1
else:
self._runtime.error(Error(ErrorCodesEnum.Expected, ASTTypesEnum.Type.value))
elif i >= 2 and token.type == TokenTypes.Expression_Character and token.value == ExpressionCharacters.Equal.value:
if i+1 < tokens.count():
end = tokens.where(lambda t: t.type == TokenTypes.Format_Character and t.value == FormatCharacters.Semicolon.value).first_or_default()
value = ''
if end is not None:
for t in tokens[i+1:]:
t: Token = t
if t == end:
break
value += t.value
ast.append(AST(ASTTypesEnum.VariableValue, value, self._runtime.line_count, self._runtime.line_count))
i = tokens.index(end)
else:
self._runtime.error(Error(ErrorCodesEnum.Expected, ASTTypesEnum.Type.value))
elif i == tokens.count()-1 and token.type != TokenTypes.Format_Character and token.value != FormatCharacters.Semicolon.value:
self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Semicolon.value))
else:
if i == 1 or i == 2 and token.type == TokenTypes.Type:
self._runtime.error(Error(ErrorCodesEnum.Unexpected, f'{token.type.name.lower()}: {token.value}'))
else:
self._runtime.error(Error(ErrorCodesEnum.Unexpected, token.value))
i += 1
return AST(ASTTypesEnum.VariableDeclaration, ast, self._runtime.line_count, self._runtime.line_count)
def _parse_func(self, tokens: List[Token]) -> AST:
"""[summary]
Args:
tokens (List[Token]): [description]
Returns:
AST: [description]
"""
end = None
ast = List(AST)
i = 0
while i < tokens.count():
token: Token = tokens[i]
# if line contains }
if token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Right_Brace.value:
end = self._runtime.line_count
elif i == tokens.count()-1 and token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Left_Brace.value:
break
elif i == tokens.count()-1:
self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Left_Brace.value))
elif i == 0 and token.type == TokenTypes.Keyword and token.value in self._access_keywords:
ast.append(AST(ASTTypesEnum.Access, token.value, self._runtime.line_count, self._runtime.line_count))
elif i <= 1 and token.type == TokenTypes.Keyword and token.value == Keywords.Function.value:
ast.append(AST(ASTTypesEnum.Keyword, token.value, self._runtime.line_count, self._runtime.line_count))
elif i >= 1 and token.type == TokenTypes.Name:
ast.append(AST(ASTTypesEnum.Name, token.value, self._runtime.line_count, self._runtime.line_count))
elif i >= 2 and token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Left_Parenthesis.value:
args, last_token = self._parse_args(tokens.skip(i))
if last_token is not None:
i = tokens.index(last_token)
ast.append(args)
elif i >= 3 and token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Colon.value:
if i+1 > tokens.count():
self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Colon.value))
if i+2 > tokens.count():
self._runtime.error(Error(ErrorCodesEnum.Expected, Keywords.Type.value))
if i+3 > tokens.count():
self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Left_Brace.value))
ast.append(AST(ASTTypesEnum.Type, tokens[i+1].value, self._runtime.line_count, self._runtime.line_count))
i += 3
else:
self._runtime.error(Error(ErrorCodesEnum.Unexpected, token.value))
i += 1
return AST(ASTTypesEnum.FuncDeclaration, ast, self._runtime.line_count, end)
def _parse_use(self, tokens: List[Token]) -> AST:
""" Parses use imports
Args:
tokens (List[Token]): Tokens from lexer
AST:
use Program;
<use> <name> <end>
use Program.Test;
<use> <name> <.> <name> <end>
Returns:
AST: Use directive AST
"""
ast = List(AST)
i = 0
# for i in range(0, tokens.count()):
while i < tokens.count():
token: Token = tokens[i]
if i == tokens.count()-1 and token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Semicolon.value:
break
elif i == tokens.count()-1:
self._runtime.error(Error(ErrorCodesEnum.Expected, FormatCharacters.Semicolon.value))
elif i == 0 and token.type == TokenTypes.Keyword and token.value == Keywords.Use.value:
ast.append(AST(ASTTypesEnum.Keyword, token.value, self._runtime.line_count, self._runtime.line_count))
elif i == 1 and token.type == TokenTypes.Name:
name, last_token = self._parse_name(tokens.skip(i))
if last_token is not None:
i = tokens.index(last_token)
ast.append(name)
else:
self._runtime.error(Error(ErrorCodesEnum.Unexpected, token.value))
i += 1
return AST(ASTTypesEnum.UseDirective, ast, self._runtime.line_count, self._runtime.line_count)
def _parse_constructor(self, tokens: List[Token]) -> AST:
""" Parses constructor
Args:
tokens (List[Token]): Tokens from lexer
AST:
constructor() {
<constructor> <(> <)> <{>
constructor(i: number) {
<constructor> <(> <name> <:> <type> <)> <{>
constructor(i: number, name: string) {
<constructor> <(> <name> <:> <type> <,> <name> <:> <type> <)> <{>
Returns:
AST: Constructor AST
"""
ast = List(AST)
end = None
i = 0
while i < tokens.count():
token: Token = tokens[i]
if token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Left_Brace.value:
break
elif token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Right_Brace.value:
end = self._runtime.line_count
break
elif i == 0 and token.type == TokenTypes.Keyword and token.value == Keywords.Constructor.value:
ast.append(AST(ASTTypesEnum.Keyword, token.value, self._runtime.line_count, self._runtime.line_count))
elif i == 1 and token.type == TokenTypes.Format_Character and token.value == FormatCharacters.Left_Parenthesis.value:
args, last_token = self._parse_args(tokens.skip(i))
if last_token is not None:
i = tokens.index(last_token)
ast.append(args)
else:
self._runtime.error(Error(ErrorCodesEnum.Unexpected, token.value))
i += 1
return AST(ASTTypesEnum.Constructor, ast, self._runtime.line_count, end)
def create_ast(self, tokens: List[Token]) -> List[AST]:
self._ast = List(AST)
if tokens.where(lambda t: t.type == TokenTypes.Keyword and t.value == Keywords.Use.value).count() > 0:
self._ast.append(self._parse_use(tokens))
elif tokens.where(lambda t: t.type == TokenTypes.Keyword and t.value == Keywords.Library.value).count() > 0:
self._ast.append(self._parse_library_or_class(tokens))
elif tokens.where(lambda t: t.type == TokenTypes.Keyword and t.value == Keywords.Class.value).count() > 0:
self._ast.append(self._parse_library_or_class(tokens, True))
elif tokens.where(lambda t: t.type == TokenTypes.Keyword and t.value == Keywords.Function.value).count() > 0:
self._ast.append(self._parse_func(tokens))
elif tokens.where(lambda t: t.type == TokenTypes.Keyword and t.value == Keywords.Variable.value).count() > 0:
self._ast.append(self._parse_variable(tokens))
elif tokens.where(lambda t: t.type == TokenTypes.Keyword and t.value == Keywords.Constructor.value).count() > 0:
self._ast.append(self._parse_constructor(tokens))
return self._ast

1
src/runtime/__init__.py Normal file
View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1,7 @@
from abc import ABC, abstractmethod
class ClassStackABC(ABC):
@abstractmethod
def __init__(self): pass

View File

@@ -0,0 +1,7 @@
from abc import ABC, abstractmethod
class FunctionStackABC(ABC):
@abstractmethod
def __init__(self): pass

View File

@@ -0,0 +1,7 @@
from abc import ABC, abstractmethod
class LibraryStackABC(ABC):
@abstractmethod
def __init__(self): pass

View File

@@ -0,0 +1,39 @@
from abc import ABC, abstractmethod
from cpl_core.console.console import Console
from cpl_core.console.foreground_color_enum import ForegroundColorEnum
from runtime.model.error import Error
class RuntimeServiceABC(ABC):
@abstractmethod
def __init__(self): pass
@property
@abstractmethod
def line_count(self) -> int: pass
@line_count.setter
@abstractmethod
def line_count(self, line_count: int): pass
@property
def file(self) -> str:
return self._file
@file.setter
def file(self, value: str):
self._file = value
@abstractmethod
def input(self, prefix: str) -> str: pass
@abstractmethod
def output(self, text: str): pass
@abstractmethod
def error(self, error: Error): pass
@abstractmethod
def runtime_error(self, error: Error): pass

View File

@@ -0,0 +1,7 @@
from abc import ABC, abstractmethod
class VariableStackABC(ABC):
@abstractmethod
def __init__(self): pass

View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1,16 @@
from runtime.model.error_codes_enum import ErrorCodesEnum
class Error:
def __init__(self, code: ErrorCodesEnum, msg: str = ''):
self._code = code
self._msg = code.value.format(msg)
@property
def code(self) -> ErrorCodesEnum:
return self._code
@property
def message(self) -> str:
return self._msg

View File

@@ -1,7 +1,8 @@
from enum import Enum
class ErrorCodes(Enum):
class ErrorCodesEnum(Enum):
StartFailed = 'Start failed'
FileNotFound = 'File not found'
WrongFileType = 'Wrong file type'
@@ -18,11 +19,3 @@ class ErrorCodes(Enum):
ClassInFunc = 'Class in func'
FuncInLib = 'Func in lib'
FuncInFunc = 'Func in func'
class Error:
def __init__(self, code: ErrorCodes, msg: str = '') -> None:
self.code = code
self.msg = code.value.format(msg)

44
src/runtime/runtime.json Normal file
View File

@@ -0,0 +1,44 @@
{
"ProjectSettings": {
"Name": "runtime",
"Version": {
"Major": "0",
"Minor": "0",
"Micro": "0"
},
"Author": "",
"AuthorEmail": "",
"Description": "",
"LongDescription": "",
"URL": "",
"CopyrightDate": "",
"CopyrightName": "",
"LicenseName": "",
"LicenseDescription": "",
"Dependencies": [
"sh_cpl-core==2021.10.0.post1",
"sh_cpl-query==2021.10.0.post1"
],
"PythonVersion": ">=3.9.2",
"PythonPath": {
"linux": ""
},
"Classifiers": []
},
"BuildSettings": {
"ProjectType": "library",
"SourcePath": "",
"OutputPath": "../../dist",
"Main": "",
"EntryPoint": "",
"IncludePackageData": false,
"Included": [],
"Excluded": [
"*/__pycache__",
"*/logs",
"*/tests"
],
"PackageData": {},
"ProjectReferences": []
}
}

View File

@@ -0,0 +1 @@
# imports

View File

@@ -0,0 +1,48 @@
from cpl_core.console import Console, ForegroundColorEnum
from runtime.abc.runtime_service_abc import RuntimeServiceABC
from runtime.model.error import Error
class RuntimeService(RuntimeServiceABC):
def __init__(self):
self._line_count = 0
self._file = ''
@property
def line_count(self) -> int:
return self._line_count
@line_count.setter
def line_count(self, line_count: int):
self._line_count = line_count
@property
def file(self) -> str:
return self._file
@file.setter
def file(self, value: str):
self._file = value
def input(self, prefix: str) -> str:
return Console.read_line(prefix)
def output(self, text: str) -> None:
Console.write_line(f'> {text}')
def error(self, error: Error) -> None:
Console.set_foreground_color(ForegroundColorEnum.red)
if self._file is not None:
Console.write_line(f'Error in {self._file} line {self._line_count}\n{error.message}')
else:
Console.write_line(f'Error in line {self._line_count}\n{error.message}')
Console.color_reset()
Console.write_line()
exit()
def runtime_error(self, error: Error) -> None:
Console.set_foreground_color(ForegroundColorEnum.red)
Console.write_line(f'{error.message}')
Console.color_reset()
Console.write_line()
exit()

View File

@@ -1,6 +0,0 @@
output(3+3+3);
output(3);
output("Hallo");
output(false);
output(true);
output(input("Name: "));

View File

@@ -1,3 +0,0 @@
test = 3 ** 2
print(test)
print(eval('3**2'))

1
src/tests/__init__.py Normal file
View File

@@ -0,0 +1 @@
# imports: