This repository has been archived by the owner on Aug 7, 2023. It is now read-only.

Commit

Spread Parts Out
LonnonjamesD authored and LonnonjamesD committed May 2, 2021
1 parent ffe82db commit 852584f
Showing 7 changed files with 201 additions and 154 deletions.
161 changes: 7 additions & 154 deletions main.py
@@ -2,179 +2,32 @@
import msvcrt as m
import time

from src.lexer import Lexer
from src.parser import Parser

try:
    f = (open(str(sys.argv[1]), "r"))
except:
    f = open("text.sol", "r")
text = []
for iterate in f:
    text += [iterate]
tokens = [
    "out",
    '"',
    "outln",
    "vent",
    "var",
    "=",
    "|"
]
tokensdicc = {
    "out": "FUNC",
    '"': "STRING",
    "outln": "FUNC",
    "vent": "FUNC",
    "var": "DECLARE",
    "=": "ASSIGNMENT",
    "|": "VARUSE"
}
storedvars = {}

#* Pairs function from lua
def pairs(o):
    if isinstance(o,dict):
        return o.items()
    else:
        return enumerate(o)

#-----------------------------------------------------------------|
#- |
#- Lexer |
#- |
#-----------------------------------------------------------------|

#* This turns the code into a Lexed List that the Parser can understand and run
def Lexer(text):
    for i, index in pairs(text):
        text[i] = text[i].rstrip("\n")
    iterate = 0
    Lexed = []
    while iterate != len(text):
        check = ""
        for char in text[iterate]:
            check += char
            if check in tokens:
                if check == "out":
                    if text[iterate][0:5] == "outln":
                        Lexed += ["outln"]
                        check = ""
                    else:
                        Lexed += [check]
                        check = ""
                else:
                    Lexed += [check]
                    check = ""
            elif char in tokens:
                #_ This gets the value inside of the quotes
                if char == '"' and Lexed[-1] == '"':
                    Lexed += [f"{check[:-1]}/S"]
                    Lexed += [f"{char}"]
                    check = ""
                #_ Gets the inline var's name
                elif char == '|' and Lexed[-1] == '|':
                    Lexed += [f"{check[:-1]}/V"]
                    Lexed += [f"{char}"]
                    check = ""
                #_ Get Assignment operator and name of the var
                elif char == "=":
                    Final = check
                    Final == Final[:-3]
                    Final = Final.replace(" ", "")
                    Final = Final.split("=")
                    Final[1] = "="
                    Lexed += Final
                    check = ""
                else:
                    Lexed += [char]
                    check = ""
        iterate += 1
    return Lexed

#* This tokenizes the lexed list so the Parser can understand it
def Tokinzier(Lexed):
    iterate = 0
    Tokenized = []
    while iterate != len(Lexed):
        try:
            Tokenized += [[Lexed[iterate], tokensdicc[Lexed[iterate]]]]
        except:
            LexedEnd = Lexed[iterate][-2:]
            if LexedEnd == "/S":
                Final = Lexed[iterate]
                Final = Final[:-2]
                Tokenized += [[Final, "STRING"]]
            elif LexedEnd == "/V":
                Final = Lexed[iterate]
                Final = Final[:-2]
                Tokenized += [[Final, "VAR"]]
            elif Lexed[iterate-1] == "var":
                Tokenized += [[Lexed[iterate], "VARNAME"]]
        iterate += 1
    return Tokenized


#-----------------------------------------------------------------|
#- |
#- Built in Functions |
#- |
#-----------------------------------------------------------------|

#& Prints text without a newline
def outFUNC(text):
    text = text.replace("\\n", "\n")
    print(f"{text}", end='')
#& Prints text with a new line at the end
def outlnFUNC(text):
    text = text.replace("\\n", "\n")
    print(f"{text}", end='\n')
#& Exits the program
def vent():
    try:
        def wait():
            m.getch()
        print("\n\nPress any key to continue...")
        wait()
    except:
        os.system('read -s -n 1 -p "Press any key to continue..."')
    exit()

#-----------------------------------------------------------------|
#- |
#- Parser |
#- |
#-----------------------------------------------------------------|

#* This checks the Lexed code and parses it and runs it
def Parser(text):
    iterate = 0
    while iterate != len(text):
        #_ Look for the word out and make sure it has the type of FUNC
        if text[iterate][1] == "FUNC":
            if text[iterate+1][1] == "STRING" and text[iterate+2][1] == "STRING" and text[iterate+3][1] == "STRING":
                if text[iterate][0] == "out":
                    outFUNC(text[iterate+2][0])
                elif text[iterate][0] == "outln":
                    outlnFUNC(text[iterate+2][0])
            elif text[iterate][0] == "vent" and text[iterate][1] == "FUNC":
                vent()
        #_ Look for the word var and make sure it has the type of DECLARE
        elif text[iterate][0] == "var" and text[iterate][1] == "DECLARE":
            print()
        iterate += 1

#-----------------------------------------------------------------|
#- |
#- Run The Code |
#- |
#-----------------------------------------------------------------|

LexedVersion = Lexer(text)
print(LexedVersion)
Parser(LexedVersion)
#time.sleep(10)
try:
"""try:
def wait():
m.getch()
print("\n\nPress any key to continue...")
wait()
except:
os.system('read -s -n 1 -p "Press any key to continue..."')
os.system('read -s -n 1 -p "Press any key to continue..."')"""
34 changes: 34 additions & 0 deletions main.spec
@@ -0,0 +1,34 @@
# -*- mode: python ; coding: utf-8 -*-


block_cipher = None


a = Analysis(['main.py'],
             pathex=['E:\\Coding Shit\\Solis'],
             binaries=[],
             datas=[],
             hiddenimports=[],
             hookspath=[],
             runtime_hooks=[],
             excludes=[],
             win_no_prefer_redirects=False,
             win_private_assemblies=False,
             cipher=block_cipher,
             noarchive=False)
pyz = PYZ(a.pure, a.zipped_data,
          cipher=block_cipher)
exe = EXE(pyz,
          a.scripts,
          a.binaries,
          a.zipfiles,
          a.datas,
          [],
          name='main',
          debug=False,
          bootloader_ignore_signals=False,
          strip=False,
          upx=True,
          upx_exclude=[],
          runtime_tmpdir=None,
          console=True )
27 changes: 27 additions & 0 deletions src/builtins.py
@@ -0,0 +1,27 @@

import os  # used by vent() for the non-Windows fallback prompt
import msvcrt as m  # Windows-only; used by vent() to wait for a keypress

#-----------------------------------------------------------------|
#- |
#- Built in Functions |
#- |
#-----------------------------------------------------------------|

#& Prints text without a newline
def outFUNC(text):
    text = text.replace("\\n", "\n")
    print(f"{text}", end='')

#& Prints text with a new line at the end
def outlnFUNC(text):
    text = text.replace("\\n", "\n")
    print(f"{text}", end='\n')

#& Exits the program
def vent():
    try:
        def wait():
            m.getch()
        print("\n\nPress any key to continue...")
        wait()
    except:
        os.system('read -s -n 1 -p "Press any key to continue..."')
    exit()
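
A quick usage sketch, not part of the commit: calling the two print helpers from src/builtins.py directly (assuming a Windows host, since the module now pulls in msvcrt for vent()).

from src.builtins import outFUNC, outlnFUNC

outFUNC("printed without a trailing newline")
outlnFUNC("escaped \\n sequences become real line breaks")  # the "\n" escape is expanded by replace()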
85 changes: 85 additions & 0 deletions src/lexer.py
@@ -0,0 +1,85 @@
from src.tokens import tokens, tokensdict

#* Pairs function from lua
def pairs(o):
    if isinstance(o, dict):
        return o.items()
    else:
        return enumerate(o)


#-----------------------------------------------------------------|
#- |
#- Lexer & Tokenizer |
#- |
#-----------------------------------------------------------------|

#* This turns the code into a Lexed List that the Parser can understand and run
def Lexer(text):
    for i, index in pairs(text):
        text[i] = text[i].rstrip("\n")
    iterate = 0
    Lexed = []
    while iterate != len(text):
        check = ""
        for char in text[iterate]:
            check += char
            if check in tokens:
                if check == "out":
                    if text[iterate][0:5] == "outln":
                        Lexed += ["outln"]
                        check = ""
                    else:
                        Lexed += [check]
                        check = ""
                else:
                    Lexed += [check]
                    check = ""
            elif char in tokens:
                #_ This gets the value inside of the quotes
                if char == '"' and Lexed[-1] == '"':
                    Lexed[-1] = ""
                    Lexed += [f"{check[:-1]}/S"]
                    check = ""
                #_ Gets the inline var's name
                elif char == '|' and Lexed[-1] == '|':
                    Lexed[-1] = ""
                    Lexed += [f"{check[:-1]}/V"]
                    check = ""
                #_ Get Assignment operator and name of the var
                elif char == "=":
                    Final = check
                    Final == Final[:-3]
                    Final = Final.replace(" ", "")
                    Final = Final.split("=")
                    Final[1] = "="
                    Lexed += Final
                    check = ""
                else:
                    Lexed += [char]
                    check = ""
        iterate += 1
    Tokenizied = Tokinzier(Lexed)
    return Tokenizied

#* This tokenizes the lexed list so the Parser can understand it
def Tokinzier(Lexed):
    iterate = 0
    Tokenized = []
    while iterate != len(Lexed):
        try:
            Tokenized += [[Lexed[iterate], tokensdict[Lexed[iterate]]]]
        except:
            LexedEnd = Lexed[iterate][-2:]
            if LexedEnd == "/S":
                Final = Lexed[iterate]
                Final = Final[:-2]
                Tokenized += [[Final, "STRING"]]
            elif LexedEnd == "/V":
                Final = Lexed[iterate]
                Final = Final[:-2]
                Tokenized += [[Final, "VAR"]]
            elif Lexed[iterate-1] == "var":
                Tokenized += [[Lexed[iterate], "VARNAME"]]
        iterate += 1
    return Tokenized
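
A small sketch, not part of the commit, of what the relocated Lexer now returns; the sample source line is made up, and the expected list in the comment is traced from the code above rather than taken from the repo.

from src.lexer import Lexer

toks = Lexer(['outln "hello"'])
print(toks)  # traced expectation: [['outln', 'FUNC'], ['hello', 'STRING']]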
29 changes: 29 additions & 0 deletions src/parser.py
@@ -0,0 +1,29 @@
from src.builtins import *

#-----------------------------------------------------------------|
#- |
#- Parser |
#- |
#-----------------------------------------------------------------|

#* This checks the Lexed code and parses it and runs it
def Parser(text):
    iterate = 0
    while iterate != len(text):
        #_ Look for the word out and make sure it has the type of FUNC
        if text[iterate][1] == "FUNC":
            if text[iterate+1][1] == "STRING" and not text[iterate+1][0] == '"':
                if text[iterate][0] == "out":
                    outFUNC(text[iterate+1][0])
                elif text[iterate][0] == "outln":
                    outlnFUNC(text[iterate+1][0])
            elif text[iterate+1][1] == "STRING" and text[iterate+1][0] == '"':
                print(f"""\rToo Little or Too many \"'s at
{text[iterate][0]} {text[iterate+1][0]}\n""")
            #_ Look for the word vent and make sure it has the type of FUNC
            elif text[iterate][0] == "vent":
                vent()
        #_ Look for the word var and make sure it has the type of DECLARE
        elif text[iterate][0] == "var" and text[iterate][1] == "DECLARE":
            print()
        iterate += 1
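
And a matching sketch, not part of the commit, driving the relocated Parser with a hand-written token list in the same [value, type] shape the lexer emits; per the FUNC/STRING branch above it should print the string argument.

from src.parser import Parser

# Hand-written token pairs, shaped like the Lexer/Tokinzier output above.
Parser([['outln', 'FUNC'], ['hello', 'STRING']])  # should print: hello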
18 changes: 18 additions & 0 deletions src/tokens.py
@@ -0,0 +1,18 @@
tokens = [
    "out",
    '"',
    "outln",
    "vent",
    "var",
    "=",
    "|"
]
tokensdict = {
    "out": "FUNC",
    '"': "STRING",
    "outln": "FUNC",
    "vent": "FUNC",
    "var": "DECLARE",
    "=": "ASSIGNMENT",
    "|": "VARUSE"
}
1 change: 1 addition & 0 deletions text.sol
@@ -1,2 +1,3 @@
var yourmom = "hello"
outln """
outln |yourmom|
