Mirror of https://github.com/minetest-mods/mesecons.git (synced 2025-07-01 15:20:23 +02:00)
Fixes formatting.
@@ -334,7 +334,7 @@ local function create_environment(pos, mem, event)
 	env.pcall=function(...)
 		local pcr={pcall(...)}
 		if not pcr[1] then
-			if pcr[2]~=timeout_error then
+			if pcr[2] ~= timeout_error then
 				error(pcr[2])
 			end
 		end
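
For context, the hunk above wraps `pcall` so sandboxed code cannot swallow the controller's timeout. A minimal standalone sketch of that pattern, assuming `timeout_error` is a unique sentinel raised by the sandbox's timeout machinery; the sentinel's definition and the final return lie outside this hunk and are guessed here:

local timeout_error = {}  -- stand-in sentinel, not the mod's real value

local function wrapped_pcall(...)
	local pcr = {pcall(...)}
	if not pcr[1] then
		-- Re-raise every error except the timeout sentinel, so user code
		-- cannot catch the "out of time" signal and keep running.
		if pcr[2] ~= timeout_error then
			error(pcr[2])
		end
	end
	return unpack(pcr)  -- assumed: the real env.pcall returns pcall's results
end
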
@@ -352,7 +352,7 @@ end
 
 --A VERY minimalistic lexer, does what it needs to for this job and no more.
 local function lexLua(code)
-	local lexElements={}
+	local lexElements = {}
 	--Find keywords, whitespace, strings, and then everything else is "cleanup"
 
 	--Keywords and numbers.
@@ -360,7 +360,7 @@ local function lexLua(code)
 		return str:match("^[%w_]+")
 	end
 	function lexElements.whitespace(str)
-		return str:match("^%s+")
+		return str:match("^[\r \t]+")
 	end
 	--Unimplemented stuff goes here.
 	function lexElements.cleanup(str)
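
A note on the whitespace change above: `%s` also matches newlines, so the old pattern folded line breaks into ordinary whitespace; the narrower `[\r \t]` class leaves `\n` for the separate `eol` element added in the next hunk. A quick plain-Lua comparison, nothing mod-specific:

local s = "  \t\n  next"
print(s:match("^%s+"):len())      --> 6: the old pattern runs through the newline
print(s:match("^[\r \t]+"):len()) --> 3: the new pattern stops at the newline
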
@@ -368,42 +368,47 @@ local function lexLua(code)
 	end
 	function lexElements.string(str)
 		--Now parse a string.
-		local quoteType=str:sub(1,1)
-		if quoteType~="\"" and quoteType~="\'" then return nil end
-		local inEscape=true
-		local rstr=""
-		for i=1,str:len() do
-			local c=str:sub(i,i)
-			rstr=rstr..c
+		local quoteType = str:sub(1,1)
+		if quoteType ~= "\"" and quoteType ~= "\'" then return nil end
+		local inEscape = true
+		local rstr = ""
+		for i=1, str:len() do
+			local c = str:sub(i,i)
+			rstr = rstr..c
 			if inEscape then
-				inEscape=false
+				inEscape = false
 			else
-				if c==quoteType then return rstr end
-				if c=="\\" then inEscape=true end
+				if c == quoteType then return rstr end
+				if c == "\\" then inEscape = true end
 			end
 		end
 		return nil --unfinished string
 	end
 	function lexElements.blockcomment(str)
-		local a=str:match("^%-%-%[%[")
+		local a = str:match("^%-%-%[%[")
 		if not a then return nil end
-		local s=str:find("%-%-%]%]")
+		local s = str:find("%-%-%]%]")
 		if not s then return nil end
 		return str:sub(1,s+3)
 	end
 	function lexElements.linecomment(str)
 		return str:match("^%-%-[^\r\n]+")
 	end
-	local lexElementsOrder={"keyword","whitespace","string","blockcomment","linecomment","cleanup"}
-	local lexResults={}
-	while code:len()>0 do
+	-- Note: EOL is not reliable for linecounting purposes, but keeps the code better formatted, if anything
+	function lexElements.eol()
+		return str:match("^[\n]+")
+	end
+	local lexElementsOrder = {"keyword", "eol", "whitespace",
+		"string", "blockcomment", "linecomment", "cleanup"}
+	local lexResults = {}
+	while code:len() > 0 do
 		--Because break doesn't exist.
 		local function parseElem()
			for _,v in ipairs(lexElementsOrder) do
-				local t,e=lexElements[v](code)
+				local t, e=lexElements[v](code)
 				if t then
-					code=code:sub(t:len()+1)
-					table.insert(lexResults,{v,t})
+					code=code:sub(t:len() + 1)
+					table.insert(lexResults,{v, t})
 					return nil
 				end
 				if e then
@@ -413,7 +418,7 @@ local function lexLua(code)
 			return "no match"
 		end
 		local err=parseElem()
-		if err then return nil,"Lexer Error:"..err..": "..code:sub(1,32) end
+		if err then return nil, "Lexer Error:"..err..": "..code:sub(1, 32) end
 	end
 	return lexResults
 end
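
Taken together, the lexer hunks define a loop that repeatedly tries each element function on the front of `code` and records `{element_name, matched_text}` pairs. A hedged usage sketch follows; `lexLua` is a local in the mod, so this assumes it has been exposed for testing, and the sample input avoids newlines because `lexElements.eol` as committed takes no `str` parameter:

-- Hypothetical test harness; assumes lexLua has been made reachable.
local tokens, err = lexLua('local x = "a\\"b" --trailing comment')
assert(tokens, err)
for _, tok in ipairs(tokens) do
	-- Each entry is {kind, text}, e.g. {"keyword", "local"}, {"whitespace", " "},
	-- {"string", '"a\\"b"'}, {"linecomment", "--trailing comment"}.
	print(tok[1], string.format("%q", tok[2]))
end
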
@@ -432,30 +437,31 @@ local function code_prohibited(code)
 	-- This only exists in Lua 5.2
 	--(dummy) goto
 
-	local lexed,err=lexLua(code)
-	local rcode=""
+	local lexed, err=lexLua(code)
+	local rcode = ""
 	if err then
-		return nil,"Couldn't lex code:"..err
+		return nil, "Couldn't lex code:"..err
 	end
-	for _,v in ipairs(lexed) do
-		--remove useless stuff since we're going over it anyway.
-		if v[1]=="whitespace" then v[2]="\r\n" end
-		if v[1]=="blockcomment" then v[2]="" end
-		if v[1]=="linecomment" then v[2]="" end
+	for _, v in ipairs(lexed) do
+		--remove/reduce useless stuff since we're going over it anyway.
+		if v[1] == "whitespace" then v[2] = " " end
+		if v[1] == "eol" then v[2] = "\r\n" end -- Windows users may like the \r
+		if v[1] == "blockcomment" then v[2] = "" end
+		if v[1] == "linecomment" then v[2] = "" end
 		--Code injection so we can safely use every feature in Lua. The only purpose of the lexer is to filter stuff that would break this, really.
 		--(the old method would stop you from using keywords in strings.)
 		--You can also replace this with coroutine.yield, but that's best left to a computercraft-style mod.
-		local dummy="(function() end)()"
-		if v[2]=="do" then
-			v[2]="do "..dummy
+		local dummy = "(function() end)()"
+		if v[2] == "do" then
+			v[2] = "do "..dummy
 		end
-		if v[2]=="repeat" then
-			v[2]="repeat "..dummy
+		if v[2] == "repeat" then
+			v[2] = "repeat "..dummy
 		end
-		if v[2]=="goto" then
-			v[2]=dummy.." goto"
+		if v[2] == "goto" then
+			v[2] = dummy.." goto"
 		end
-		rcode=rcode..v[2]
+		rcode = rcode..v[2]
 	end
 	return rcode
 end
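
To make the rewriting concrete: after this pass, every bare `do`, `repeat`, or `goto` token carries a dummy function call, while the same words inside string literals are untouched because the lexer consumed them as string tokens. A hedged illustration (`code_prohibited` is a local in the mod, so calling it directly assumes it has been exposed, and it assumes the unshown cleanup element passes punctuation through unchanged); why the dummy calls help with interrupting runaway code is only hinted at by the comments in the hunk:

-- Hypothetical call; assumes code_prohibited is reachable from here.
local rewritten = code_prohibited('while true do print("do not stop") end')
-- Expected result: the keyword 'do' gets the dummy call appended, while the
-- 'do' inside the string literal is left alone:
--   while true do (function() end)() print("do not stop") end
print(rewritten)
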
@@ -507,7 +513,7 @@ local function run(pos, event)
 	local mem = load_memory(meta)
 	local code = meta:get_string("code")
 	local err
-	code,err = code_prohibited(code)
+	code, err = code_prohibited(code)
 	if not code then return err end
 
 	-- Create environment