Major update

This commit is contained in:
Coder12a 2019-01-08 00:32:34 -06:00
parent 60bc174189
commit 02796f2998
3 changed files with 866 additions and 884 deletions

134
README.md
View File

@ -11,28 +11,28 @@ Copy both *colddb.lua* and *async* files to your minetest mod or game. Copy the
Write this code in your lua file.
1. Create a directory and link it as a database.
```lua
coldbase = colddb.get_db("mydb")
coldbase = colddb.Colddb("mydb")
```
2. Add an extra folder to the directory. Every new file will be added to the global tag (folder).
```lua
colddb.add_global_tag(coldbase,"ips")
coldbase.add_global_tag("ips")
```
3. Store a key item (this key has no value).
```lua
colddb.set_key(coldbase,"MyKey")
coldbase.set_key("MyKey")
```
4. Store a key-value item.
```lua
colddb.set(coldbase,"MyKeyAndValue","Hello world")
coldbase.set("MyKeyAndValue", "Hello world")
```
5. Retrieve items (get_key's callback(arg) will return true, false, or nil).
```lua
colddb.get(coldbase,"MyKeyAndValue",nil,function(arg)
coldbase.get("MyKeyAndValue", nil, function(arg)
if arg then
minetest.log(string.format("value:%s", arg))
end
end)
colddb.get_key(coldbase,"MyKey",nil,function(arg)
coldbase.get_key("MyKey", nil, function(arg)
if arg then
minetest.log("Found key")
else
@ -42,7 +42,7 @@ end)
```
6. Delete a key (file). This function works on both keys and key-value keys.
```lua
colddb.remove(coldbase,"MyKeyAndValue")
coldbase.remove("MyKeyAndValue")
```
7. If add_to_mem_pool is true (true by default), keys are stored in a weak Lua table in memory and are removed by the garbage collector when no longer in use. Storing data in memory prevents the database from constantly reloading data from files.
```lua
@ -54,23 +54,23 @@ coldbase.indexes = true
```
9. Only if coldbase.indexes is true: returns the number of keys in the indexing file.
```lua
colddb.get_count(coldbase)
coldbase.get_count()
```
10. Only if coldbase.indexes is true: iterates through the indexing file (breaks and ends when it reaches the end of the file).
```lua
colddb.iterate_index_table(coldbase,nil,func_list_keys,nil)
coldbase.iterate_index_table(nil, func_list_keys, nil)
```
11. Adds a folder which can be used in other functions that take a tag_name argument.
```lua
colddb.add_tag(coldbase,"Extra_Folder",{"Extra","Folder"})
coldbase.add_tag("Extra_Folder", {"Extra", "Folder"})
```
12. Returns the tag name; if the tag does not exist, it creates one.
```lua
colddb.get_or_add_tag(coldbase,"Extra_Folder",{"Extra","Folder"})
coldbase.get_or_add_tag("Extra_Folder", {"Extra", "Folder"})
```
13. Remove a tag by name.
```lua
colddb.remove_tag(coldbase,"Extra_Folder")
coldbase.remove_tag("Extra_Folder")
```
Quick Look
@ -78,28 +78,11 @@ Quick Look
```lua
-- create a directory (watchlist) and link it as a database.
ip_db = colddb.get_db("watchlist")
-- add an extra folder to the directory.
colddb.add_global_tag(ip_db,"ips")
-- return a recorded ip address from the data base.
function ip_db.find(player,callback)
colddb.get(ip_db,player,nil,callback)
end
-- Key is the file and file name. Value is the contents within the file.
-- global tag(ips)--->key(Player name)--->value(ip address)
function ip_db.record_ip(player,ip)
colddb.set(ip_db,player,ip)
end
function ip_db.delete(player)
colddb.remove(db,player)
end
ip_db = colddb.Colddb("watchlist")
-- Whenever a player joins, his/her ip address is recorded to the database by player name.
minetest.register_on_prejoinplayer(function(name, ip)
ip_db.record_ip(name,ip)
ip_db.set(name, ip, ip_db.get_or_add_tag("ips", "ips"))
end)
minetest.register_chatcommand("ip", {
@ -107,8 +90,8 @@ minetest.register_chatcommand("ip", {
description = "Get an player's ip address.",
func = function(name, param)
-- Get the ip record asynchronously.
colddb.get(ip_db,param,nil,function(record)
-- If database contains the record data then send it to the player.
ip_db.get(param, ip_db.get_or_add_tag("ips", "ips"), function(record)
-- If the record contains data, send it to the player.
if record then
minetest.chat_send_player(name, string.format("%s:%s", param, record))
else
@ -118,6 +101,16 @@ minetest.register_chatcommand("ip", {
end)
end
})
minetest.register_chatcommand("clear", {
params = "<player>",
description = "Clear out the ip database.",
func = function(name, param)
ip_db.remove_tag(ip_db.get_or_add_tag("ips", "ips"))
minetest.chat_send_player(name, "Ip Database Cleared!")
end
})
```
Quick Look Notes
@ -126,81 +119,6 @@ Quick Look Notes
In the example above we could also create a more complex ip database using tags, creating tags named after each player and then assigning the ip files to them.<br>
This way we could store many ips associated with the player instead of just one ip.
API
===========
- **Functions**
- **colddb.get_db(directory) --> db**
Creates a directory and links it as a database. Returns a 'db' object.
- **colddb.add_global_tag(db,tag)**
Adds an extra folder to the directory and advances the database to the added folder.
- **colddb.add_tag(db,name,tag)**
- Creates a folder from the given table in tag.
- **colddb.get_or_add_tag(db,name,tag) --> tag_name**
Returns a tag or creates a new one if it does not exist.
- **colddb.remove_tag(db,name)**
Removes a tag.
- **colddb.get_count(db,tag_name) --> count**
Returns the count from the index table file.
- **colddb.iterate_index_table(db,begin_func,func_on_iterate,end_func,args,tag_name)**
- function iterates through the index table file.
- **begin_func(args) --> args**
- function that is ran before the loop begins.
- **func_on_iterate(key,index,args)**
- function that is ran in the for loop.
- **end_func(args)**
- end function that is ran after the for loop ends.
- **colddb.set(db,name,_table,tag_name)**
- Writes data to the database. Key-Value.
- **colddb.set_key(db,name,tag_name)**
- Writes data to the database. Key-nil.
- **colddb.get(db,name,tag_name,callback(arg))**
- Returns specified data from the database in a callback function.
- **colddb.get_key(db,name,tag_name,callback(arg))**
- Returns if the key exist in the database.
- **colddb.remove(db,name,tag_name)**
- Deletes the specified data from the database.
- **Database object fields**
- **indexes**
If true, the database makes an indexing file for keys.
- **add_to_mem_pool**
If true, keys or values you get are cached in memory for faster access next time.
License
===========

194
async.lua
View File

@ -5,202 +5,232 @@ if not extended_api.Async then
extended_api.Async = {}
end
-- Create an independent worker-pool table for the async scheduler.
-- `threads`/`globalstep_threads` hold coroutine handles, `task_queue` holds
-- queued work items, `resting`/`maxtime` are timing knobs (milliseconds),
-- `queue_threads` caps the number of queue workers, and `state` records
-- whether the scheduler is "running" or "suspended".
function extended_api.Async.create_async_pool()
local pool = {threads = {},globalstep_threads = {},task_queue = {},resting = 200,maxtime = 200,queue_threads = 8,state = "suspended"}
return pool
end
function extended_api.Async()
local self = {}
function extended_api.Async.create_worker(pool,func)
self.pool = {threads = {}, globalstep_threads = {}, task_queue = {}, resting = 200, maxtime = 200, queue_threads = 8, state = "suspended"}
self.create_worker = function(func)
local thread = coroutine.create(func)
table.insert(pool.threads, thread)
if not thread or coroutine.status(thread) == "dead" then
minetest.after(0.3, self.create_worker, func)
minetest.after(0.5, self.schedule_worker)
minetest.chat_send_all("Fall")
return
end
table.insert(self.pool.threads, thread)
end
function extended_api.Async.create_globalstep_worker(pool,func)
self.create_globalstep_worker = function(func)
local thread = coroutine.create(func)
table.insert(pool.globalstep_threads, thread)
if not thread or coroutine.status(thread) == "dead" then
minetest.after(0.3, self.create_globalstep_worker, func)
minetest.after(0.5, self.schedule_globalstep_worker)
return
end
function extended_api.Async.run_worker(pool,index)
local thread = pool.threads[index]
if thread == nil or coroutine.status(thread) == "dead" then
table.remove(pool.threads, index)
minetest.after(0,extended_api.Async.schedule_worker,pool)
table.insert(self.pool.globalstep_threads, thread)
end
self.run_worker = function(index)
local thread = self.pool.threads[index]
if not thread or coroutine.status(thread) == "dead" then
table.remove(self.pool.threads, index)
minetest.after(0, self.schedule_worker)
return false
else
coroutine.resume(thread)
minetest.after(0,extended_api.Async.schedule_worker,pool)
minetest.after(0, self.schedule_worker)
return true
end
end
function extended_api.Async.run_globalstep_worker(pool,index)
local thread = pool.globalstep_threads[index]
if thread == nil or coroutine.status(thread) == "dead" then
table.remove(pool.globalstep_threads, index)
minetest.after(0,extended_api.Async.schedule_globalstep_worker,pool)
self.run_globalstep_worker = function(index)
local thread = self.pool.globalstep_threads[index]
if not thread or coroutine.status(thread) == "dead" then
table.remove(self.pool.globalstep_threads, index)
minetest.after(0, self.schedule_globalstep_worker)
return false
else
coroutine.resume(thread)
minetest.after(0,extended_api.Async.schedule_globalstep_worker,pool)
minetest.after(0, self.schedule_globalstep_worker)
return true
end
end
function extended_api.Async.schedule_worker(pool)
pool.state = "running"
for index,value in ipairs(pool.threads) do
minetest.after(pool.resting / 1000,extended_api.Async.run_worker,pool,index)
self.schedule_worker = function()
self.pool.state = "running"
for index, value in ipairs(self.pool.threads) do
minetest.after(self.pool.resting / 1000, self.run_worker, index)
return true
end
pool.state = "suspended"
self.pool.state = "suspended"
return false
end
function extended_api.Async.schedule_globalstep_worker(pool)
for index,value in ipairs(pool.globalstep_threads) do
minetest.after(0,extended_api.Async.run_globalstep_worker,pool,index)
self.schedule_globalstep_worker = function()
for index, value in ipairs(self.pool.globalstep_threads) do
minetest.after(0, self.run_globalstep_worker, index)
return true
end
return false
end
function extended_api.Async.priority(pool,resting,maxtime)
pool.resting = resting
pool.maxtime = maxtime
self.priority = function(resting, maxtime)
self.pool.resting = resting
self.pool.maxtime = maxtime
end
function extended_api.Async.iterate(pool,from,to,func,callback)
extended_api.Async.create_worker(pool,function()
local last_time = minetest.get_us_time() * 1000
local maxtime = pool.maxtime
self.iterate = function(from, to, func, callback)
self.create_worker(function()
local last_time = minetest.get_us_time() / 1000
local maxtime = self.pool.maxtime
for i = from, to do
local b = func(i)
if b ~= nil and b == false then
break
end
if minetest.get_us_time() * 1000 > last_time + maxtime then
if minetest.get_us_time() / 1000 > last_time + maxtime then
coroutine.yield()
last_time = minetest.get_us_time() * 1000
last_time = minetest.get_us_time() / 1000
end
end
if callback then
callback()
end
return
end)
extended_api.Async.schedule_worker(pool)
self.schedule_worker()
end
function extended_api.Async.foreach(pool,array, func, callback)
extended_api.Async.create_worker(pool,function()
local last_time = minetest.get_us_time() * 1000
local maxtime = pool.maxtime
self.foreach = function(array, func, callback)
self.create_worker(function()
local last_time = minetest.get_us_time() / 1000
local maxtime = self.pool.maxtime
for k,v in ipairs(array) do
local b = func(k,v)
if b ~= nil and b == false then
break
end
if minetest.get_us_time() * 1000 > last_time + maxtime then
if minetest.get_us_time() / 1000 > last_time + maxtime then
coroutine.yield()
last_time = minetest.get_us_time() * 1000
last_time = minetest.get_us_time() / 1000
end
end
if callback then
callback()
end
return
end)
extended_api.Async.schedule_worker(pool)
self.schedule_worker()
end
function extended_api.Async.do_while(pool,condition_func, func, callback)
extended_api.Async.create_worker(pool,function()
local last_time = minetest.get_us_time() * 1000
local maxtime = pool.maxtime
self.do_while = function(condition_func, func, callback)
self.create_worker(function()
local last_time = minetest.get_us_time() / 1000
local maxtime = self.pool.maxtime
while(condition_func()) do
local c = func()
if c ~= nil and c ~= condition_func() then
break
end
if minetest.get_us_time() * 1000 > last_time + maxtime then
if minetest.get_us_time() / 1000 > last_time + maxtime then
coroutine.yield()
last_time = minetest.get_us_time() * 1000
last_time = minetest.get_us_time() / 1000
end
end
if callback then
callback()
end
return
end)
extended_api.Async.schedule_worker(pool)
self.schedule_worker()
end
function extended_api.Async.register_globalstep(pool,func)
extended_api.Async.create_globalstep_worker(pool,function()
local last_time = minetest.get_us_time() * 1000
self.register_globalstep = function(func)
self.create_globalstep_worker(function()
local last_time = minetest.get_us_time() / 1000000
local dtime = last_time
while(true) do
dtime = (minetest.get_us_time() / 1000000) - last_time
func(dtime)
dtime = minetest.get_us_time() * 1000
-- 0.05 seconds
if minetest.get_us_time() * 1000 > last_time + 50 then
if minetest.get_us_time() / 1000000 > last_time + 0.05 then
coroutine.yield()
local last_time = minetest.get_us_time() * 1000
last_time = minetest.get_us_time() / 1000000
end
end
end)
extended_api.Async.schedule_globalstep_worker(pool)
self.schedule_globalstep_worker()
end
function extended_api.Async.chain_task(pool,tasks,callback)
extended_api.Async.create_worker(pool,function()
self.chain_task = function(tasks, callback)
self.create_worker(function()
local pass_arg = nil
local last_time = minetest.get_us_time() * 1000
local maxtime = pool.maxtime
local last_time = minetest.get_us_time() / 1000
local maxtime = self.pool.maxtime
for index, task_func in pairs(tasks) do
local p = task_func(pass_arg)
if p ~= nil then
pass_arg = p
end
if minetest.get_us_time() * 1000 > last_time + maxtime then
if minetest.get_us_time() / 1000 > last_time + maxtime then
coroutine.yield()
last_time = minetest.get_us_time() * 1000
last_time = minetest.get_us_time() / 1000
end
end
if callback then
callback(pass_arg)
end
return
end)
extended_api.Async.schedule_worker(pool)
self.schedule_worker()
end
function extended_api.Async.queue_task(pool,func,callback)
table.insert(pool.task_queue,{func = func,callback = callback})
if pool.queue_threads > 0 then
pool.queue_threads = pool.queue_threads - 1
extended_api.Async.create_worker(pool,function()
self.queue_task = function(func, callback)
table.insert(self.pool.task_queue, {func = func,callback = callback})
if self.pool.queue_threads > 0 then
self.pool.queue_threads = self.pool.queue_threads - 1
self.create_worker(function()
local pass_arg = nil
local last_time = minetest.get_us_time() * 1000
local maxtime = pool.maxtime
local last_time = minetest.get_us_time() / 1000
local maxtime = self.pool.maxtime
while(true) do
local task_func = pool.task_queue[1]
table.remove(pool.task_queue,1)
local task_func = self.pool.task_queue[1]
table.remove(self.pool.task_queue, 1)
if task_func and task_func.func then
pass_arg = nil
local p = task_func.func(pass_arg)
local p = task_func.func()
if p ~= nil then
pass_arg = p
end
if task_func.callback then
task_func.callback(pass_arg)
end
if minetest.get_us_time() * 1000 > last_time + maxtime then
if minetest.get_us_time() / 1000 > last_time + maxtime then
coroutine.yield()
last_time = minetest.get_us_time() * 1000
last_time = minetest.get_us_time() / 1000
end
else
pool.queue_threads = pool.queue_threads + 1
break
self.pool.queue_threads = self.pool.queue_threads + 1
return
end
end
end)
extended_api.Async.schedule_worker(pool)
self.schedule_worker()
end
end
-- Run `func` once on a worker coroutine, then hand its return value to the
-- optional `callback`. The previous body referenced the undefined names
-- `p` and `task_func` (left over from queue_task), so it raised a runtime
-- error and never delivered the result; the callback is now invoked
-- directly with func's return value.
self.single_task = function(func, callback)
    self.create_worker(function()
        local pass_arg = func()
        if callback then
            callback(pass_arg)
        end
        return
    end)
    self.schedule_worker()
end
return self
end

View File

@ -4,12 +4,40 @@ local function createDir(directory)
return minetest.mkdir(directory)
end
function colddb.file_Exists(db,name,tag_name)
function colddb.Colddb(directory)
local directory = string.format("%s/%s/", minetest.get_worldpath(), directory)
if not createDir(directory) then
error(string.format("%s is not a directory.", directory))
end
local self = {}
self.db = {
global_tag = "",
directory = directory,
tags = {},
mem_pool = {},
mem_pool_del = {},
indexes_pool = {},
iterate_queue = {},
indexes = false,
add_to_mem_pool = true,
async = extended_api.Async(),
}
self.db.async.priority(150, 250)
-- make tables weak so the garbage-collector will remove unused data
setmetatable(self.db.tags, {__mode = "kv"})
setmetatable(self.db.mem_pool, {__mode = "kv"})
setmetatable(self.db.mem_pool_del, {__mode = "kv"})
setmetatable(self.db.indexes_pool, {__mode = "kv"})
self.file_Exists = function(name, tag_name)
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
t = self.get_tag(tag_name)
end
local f = io.open(string.format("%s%s%s.cold",db.directory,t,name),"r")
local f = io.open(string.format("%s%s%s.cold", self.db.directory, t, name), "r")
if f ~= nil then
io.close(f)
return true
@ -19,170 +47,8 @@ function colddb.file_Exists(db,name,tag_name)
return false
end
function colddb.get_db(directory)
local directory = string.format("%s/%s",minetest.get_worldpath(),directory)
if not createDir(directory) then
error(string.format("%s is not a directory.",directory))
end
db = {
global_tag = "",
directory = directory,
tags = {},
mem_pool = {},
indexes_pool = {},
iterate_queue = {},
indexes = false,
add_to_mem_pool = true,
async_pool = extended_api.Async.create_async_pool(),
}
extended_api.Async.priority(db.async_pool,150,250)
-- make tables weak so the garbage-collector will remove unused data
setmetatable(db.tags, {__mode = "kv"})
setmetatable(db.mem_pool, {__mode = "kv"})
setmetatable(db.indexes_pool, {__mode = "kv"})
return db
end
function colddb.add_global_tag(db,tag)
local t = ""
if type(tag) == "table" then
for index in pairs(tag) do
t = string.format("%s%s/",t,index)
end
else
t = string.format("%s/",tag)
end
db.global_tag = string.format("%s%s",db.global_tag,t)
db.directory = string.format("%s/%s",db.directory,t)
if not createDir(db.directory) then
error(string.format("%s is not a directory.",db.directory))
end
end
function colddb.add_tag(db,name,tag)
local t = ""
if not db.tags[name] then
db.tags[name] = ""
end
if type(tag) == "table" then
for key,value in pairs(tag) do
t = string.format("%s%s/",t,value)
end
else
t = string.format("%s/",tag)
end
local test_path = string.format("%s%s%s",db.directory,db.tags[name],t)
if not createDir(test_path) then
error(string.format("%s is not a directory.",test_path))
end
db.tags[name] = string.format("%s%s",db.tags[name],t)
end
function colddb.get_tag(db,name)
if not name then
return ""
end
local tag = db.tags[name]
if tag then
return tag
end
return ""
end
function colddb.get_or_add_tag(db,name,tag)
if not db.tags[name] then
colddb.add_tag(db,name,tag)
end
return name
end
function colddb.remove_tag(db,name)
if db.tags[name] then
local delete_path = string.format("%s%s",db.directory,db.tags[name])
local wc = delete_path:len()
delete_path = delete_path:sub(0,wc-1)
db.tags[name] = nil
os.remove(delete_path)
end
end
function colddb.delete_file(db,name,tag_name)
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
end
local text = string.format("%s%s%s.cold",db.directory,t,name)
local err,msg = os.remove(text)
if err == nil then
print(string.format("error removing db data %s error message: %s",text,msg))
end
end
function colddb.load_table(db,name,tag_name)
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
end
local f = io.open(string.format("%s%s%s.cold",db.directory,t,name), "r")
if f then
local data = minetest.deserialize(f:read("*a"))
f:close()
return data
end
return nil
end
function colddb.save_table(db,name, _table,tag_name)
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
end
return minetest.safe_file_write(string.format("%s%s%s.cold",db.directory,t,name), minetest.serialize(_table))
end
function colddb.save_key(db,name,tag_name)
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
end
return minetest.safe_file_write(string.format("%s%s%s.cold",db.directory,t,name), "")
end
function colddb.load_key(db,name,tag_name)
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
end
local f = io.open(string.format("%s%s%s.cold",db.directory,t,name), "r")
if f then
f:close()
return true
end
return false
end
function colddb.delete_index_table(db,tag_name)
local t = ""
local name = "æIndex_table"
if tag_name then
t = colddb.get_tag(db,tag_name)
end
local p = string.format("%s%sæIndex_table.cold",db.directory,t)
if colddb.file_Exists(db,name,tag_name) then
local err,msg = os.remove(p)
if err == nil then
print(string.format("error removing db data %s error message: %s",p,msg))
end
return true
end
return false
end
local function delete_lines_func_begin(args)
local f = io.open(args.copyfile, "w")
local db = args.db
local cs = args.cs
if f then
args.file = f
args.removedlist = {}
@ -193,9 +59,7 @@ end
local function delete_lines_func_i(line, i, args)
local f = args.file
local db = args.db
local cs = args.cs
local om = db.indexes_pool[args.cs]
local om = self.db.indexes_pool[args.cs]
if om and not om.deleted_items[line] then
f:write(string.format("\n%s", line))
else
@ -205,11 +69,10 @@ local function delete_lines_func_i(line,i,args)
end
local function delete_lines_func_end(args)
local db = args.db
local cs = args.cs
if db.indexes_pool[cs] or db.indexes_pool[cs].file then
db.indexes_pool[cs].file:close()
db.indexes_pool[cs].file = nil
if self.db.indexes_pool[cs] or self.db.indexes_pool[cs].file then
self.db.indexes_pool[cs].file:close()
self.db.indexes_pool[cs].file = nil
args.file:seek("set")
args.file:write(string.format("%i", args.count))
args.file:close()
@ -221,21 +84,254 @@ local function delete_lines_func_end(args)
os.rename(args.copyfile, args.oldfile)
end
for i, l in pairs(args.removedlist) do
db.indexes_pool[cs].deleted_items[i] = nil
self.db.indexes_pool[cs].deleted_items[i] = nil
end
db.indexes_pool[cs].deleting = false
self.db.indexes_pool[cs].deleting = false
end
args = nil
end
function colddb.delete_lines(db,_lines,tag_name)
-- Drive one pass over the open index-table file registered under `cs`,
-- calling func_on_iterate(line, i, args) for each surviving line, then
-- end_func(args). Afterwards, either start the next iteration queued for
-- this index table or close the file and drop the queue entry.
-- Runs cooperatively on the pool via self.db.async.iterate.
local function iterate(func_on_iterate, end_func, count, cs, args)
local f = self.db.indexes_pool[cs]
local fl = f.file
self.db.async.iterate(1, count, function(i)
-- read one key per step; skip keys marked deleted unless the caller
-- explicitly asked to visit them (do_not_skip_removed_items)
local line = fl:read("*l")
if args.do_not_skip_removed_items or not self.db.indexes_pool[cs].deleted_items[line] then
local ar = func_on_iterate(line, i, args)
if ar ~= nil then
-- the per-line callback may replace the shared args table
args = ar
return args
end
end
end,function()
if end_func then
end_func(args)
end
-- if another iteration was queued while this one held the file, start it
if self.db.iterate_queue[cs] and self.db.iterate_queue[cs][1] then
local copy = self.db.iterate_queue[cs][1]
f = self.db.indexes_pool[cs]
if not f or not f.file then
-- the handle was closed in the meantime; reopen the index table
self.open_index_table(copy.tag_name)
f = self.db.indexes_pool[cs]
end
if copy.begin_func then
local a = copy.begin_func(copy.args)
if a and type(a) == "table" then
copy.args = a
end
end
minetest.after(0, iterate, copy.func_on_iterate, copy.end_func, copy.count, copy.cs, copy.args)
table.remove(self.db.iterate_queue[cs], 1)
return false
else
-- nothing queued: release the file handle and clear the queue slot
fl:close()
self.db.iterate_queue[cs] = nil
end
self.db.indexes_pool[cs].iterating = false
return false
end)
end
-- Cache a value table in the weak in-memory pool so later reads can skip
-- disk I/O. Pool key is "<tag_path><name>". No-op when add_to_mem_pool
-- is false.
local function load_into_mem(name, _table, tag_name)
if self.db.add_to_mem_pool then
local t = ""
if tag_name then
t = self.get_tag(tag_name)
end
local cs = string.format("%s%s", t, name)
if not self.db.mem_pool[cs] then
-- first sighting of this key: create the pool entry
self.db.mem_pool[cs] = {mem = _table, indexes = self.db.indexes}
else
-- entry exists: refresh the cached value and the indexes flag
self.db.mem_pool[cs].mem = _table
self.db.mem_pool[cs].indexes = self.db.indexes
end
end
end
-- Pending-deletion counters, keyed by directory path.
local path_count = {}
-- Recursively delete the directory tree at `delete_path` using the async
-- pool. path_count[delete_path] counts child directories still being
-- emptied; the directory itself is removed (and the memory pools flushed)
-- only once that count drops below 1. `prev_dp` is the parent path whose
-- pending count is decremented when this level finishes.
-- NOTE(review): the delayed re-check calls pass only `delete_path`, so
-- `prev_dp` is nil on retries — confirm parent counts cannot stall.
local function _remove_tag(delete_path, prev_dp)
if path_count[delete_path] and path_count[delete_path] > 0 then
-- children still pending; poll again later
minetest.after(1.5, _remove_tag, delete_path)
return
elseif path_count[delete_path] and path_count[delete_path] < 1 then
-- everything below is gone: drop caches and remove this directory
self.db.mem_pool = {}
self.db.mem_pool_del = {}
os.remove(delete_path)
return
elseif not path_count[delete_path] then
path_count[delete_path] = 0
end
local list = minetest.get_dir_list(delete_path)
self.db.async.foreach(list, function(k, v)
v = string.format("%s/%s", delete_path, v)
local err = os.remove(v)
if err == nil then
-- could not remove directly (presumably a non-empty directory):
-- recurse into it and count it as pending
minetest.after(0, _remove_tag, v, delete_path)
path_count[delete_path] = path_count[delete_path] + 1
end
end, function()
if prev_dp then
path_count[prev_dp] = path_count[prev_dp] - 1
end
if path_count[delete_path] > 0 then
minetest.after(1.5, _remove_tag, delete_path)
else
self.db.mem_pool = {}
self.db.mem_pool_del = {}
os.remove(delete_path)
end
end)
end
-- Append folder(s) to the database's global tag and working directory so
-- every subsequent file operation happens under that sub-path.
-- `tag` may be a single name or a table of nested folder names.
-- Raises when the resulting directory cannot be created.
self.add_global_tag = function(tag)
    local t = ""
    if type(tag) == "table" then
        -- Use the table's values (the folder names), matching add_tag's
        -- behavior; the previous key-based loop appended the numeric
        -- indices ("1/2/") instead of the names.
        for _, value in pairs(tag) do
            t = string.format("%s%s/", t, value)
        end
    else
        t = string.format("%s/", tag)
    end
    self.db.global_tag = string.format("%s%s", self.db.global_tag, t)
    -- NOTE(review): self.db.directory already ends with "/", so "%s/%s"
    -- yields a doubled separator — harmless on POSIX/Windows, kept as-is.
    self.db.directory = string.format("%s/%s", self.db.directory, t)
    if not createDir(self.db.directory) then
        error(string.format("%s is not a directory.", self.db.directory))
    end
end
-- Register a named tag: build the folder path described by `tag` (a single
-- name or a table of nested folder names), create it beneath any path
-- already recorded for `name`, and append it to self.db.tags[name].
-- Raises when the directory cannot be created.
self.add_tag = function(name, tag)
local t = ""
if not self.db.tags[name] then
self.db.tags[name] = ""
end
if type(tag) == "table" then
-- concatenate the folder names into "a/b/.../"
for key, value in pairs(tag) do
t = string.format("%s%s/", t, value)
end
else
t = string.format("%s/", tag)
end
local test_path = string.format("%s%s%s", self.db.directory, self.db.tags[name], t)
if not createDir(test_path) then
error(string.format("%s is not a directory.", test_path))
end
self.db.tags[name] = string.format("%s%s", self.db.tags[name], t)
end
-- Resolve a tag name to its stored folder path (e.g. "Extra/Folder/").
-- Returns "" when `name` is nil or the tag is unknown.
self.get_tag = function(name)
if not name then
return ""
end
local tag = self.db.tags[name]
if tag then
return tag
end
return ""
end
-- Ensure a tag called `name` exists (creating it from `tag` if missing)
-- and return the tag name for use with the tag_name parameters elsewhere.
self.get_or_add_tag = function(name, tag)
if not self.db.tags[name] then
self.add_tag(name, tag)
end
return name
end
-- Forget the tag `name` and delete its folder. If the direct os.remove
-- fails (err == nil), fall back to the asynchronous recursive delete in
-- _remove_tag shortly afterwards.
self.remove_tag = function(name)
if self.db.tags[name] then
local delete_path = string.format("%s%s", self.db.directory, self.db.tags[name])
local wc = delete_path:len()
-- drop the trailing "/" so os.remove receives a plain directory path
delete_path = delete_path:sub(0, wc-1)
self.db.tags[name] = nil
local err = os.remove(delete_path)
if err == nil then
minetest.after(0.1, _remove_tag, delete_path)
end
end
end
-- Delete the data file "<dir><tag><name>.cold".
-- os.remove returns nil plus a message on failure, so err == nil means
-- the delete failed; the failure is only logged, never raised.
self.delete_file = function(name, tag_name)
local t = ""
if tag_name then
t = self.get_tag(tag_name)
end
local text = string.format("%s%s%s.cold", self.db.directory, t, name)
local err, msg = os.remove(text)
if err == nil then
print(string.format("error removing db data %s error message: %s", text, msg))
end
end
-- Read and deserialize the value stored under `name` (optionally inside
-- tag `tag_name`). Returns the deserialized value, or nil when the file
-- does not exist.
self.load_table = function(name, tag_name)
local t = ""
if tag_name then
t = self.get_tag(tag_name)
end
local f = io.open(string.format("%s%s%s.cold", self.db.directory, t, name), "r")
if f then
-- whole-file read ("*a"), decoded with minetest.deserialize
local data = minetest.deserialize(f:read("*a"))
f:close()
return data
end
return nil
end
-- Serialize `_table` and write it to "<dir><tag><name>.cold" via
-- minetest.safe_file_write; returns that call's result.
self.save_table = function(name, _table, tag_name)
local t = ""
if tag_name then
t = self.get_tag(tag_name)
end
return minetest.safe_file_write(string.format("%s%s%s.cold", self.db.directory, t, name), minetest.serialize(_table))
end
-- Record a bare key (key with no value): write an empty
-- "<dir><tag><name>.cold" file whose existence alone marks the key.
self.save_key = function(name, tag_name)
local t = ""
if tag_name then
t = self.get_tag(tag_name)
end
return minetest.safe_file_write(string.format("%s%s%s.cold", self.db.directory, t, name), "")
end
-- Return true when the key file "<dir><tag><name>.cold" exists, false
-- otherwise. Opens the file read-only and closes it immediately.
self.load_key = function(name, tag_name)
local t = ""
if tag_name then
t = self.get_tag(tag_name)
end
local f = io.open(string.format("%s%s%s.cold", self.db.directory, t, name), "r")
if f then
f:close()
return true
end
return false
end
self.delete_index_table = function(tag_name)
local t = ""
local name = "æIndex_table"
if tag_name then
t = colddb.get_tag(db,tag_name)
t = self.get_tag(tag_name)
end
local p = string.format("%s%sæIndex_table.cold", self.db.directory, t)
if self.file_Exists(name, tag_name) then
local err, msg = os.remove(p)
if err == nil then
print(string.format("error removing db data %s error message: %s", p, msg))
end
return true
end
return false
end
self.delete_lines = function(_lines, tag_name)
local t = ""
local name = "æIndex_table"
if tag_name then
t = self.get_tag(tag_name)
end
local cs = string.format("%s%s", t, name)
local f = db.indexes_pool[cs]
local f = self.db.indexes_pool[cs]
local k = type(_lines)
if k == "string" then
f.deleted_items[_lines] = true
@ -244,59 +340,34 @@ function colddb.delete_lines(db,_lines,tag_name)
f.deleted_items[i] = true
end
end
if not db.indexes_pool[cs].deleting then
db.indexes_pool[cs].deleting = false
if not self.db.indexes_pool[cs].deleting then
self.db.indexes_pool[cs].deleting = false
end
if f and f.file and not db.indexes_pool[cs].deleting then
db.indexes_pool[cs].deleting = true
if db.indexes_pool[cs].needs_flushing == true then
if f and f.file and not self.db.indexes_pool[cs].deleting then
self.db.indexes_pool[cs].deleting = true
if self.db.indexes_pool[cs].needs_flushing == true then
f.file:flush()
db.indexes_pool[cs].needs_flushing = false
self.db.indexes_pool[cs].needs_flushing = false
end
local oldfile = string.format("%s%sæIndex_table.cold",db.directory,t)
local copyfile = string.format("%s%sæIndex_table.cold.replacer",db.directory,t)
local args = {db=db,cs=cs,oldfile=oldfile,copyfile=copyfile,do_not_skip_removed_items=true}
db.indexes_pool[cs] = f
colddb.iterate_index_table(db,delete_lines_func_begin,delete_lines_func_i,delete_lines_func_end,args,tag_name)
local oldfile = string.format("%s%sæIndex_table.cold", self.db.directory, t)
local copyfile = string.format("%s%sæIndex_table.cold.replacer", self.db.directory, t)
local args = {cs = cs, oldfile = oldfile, copyfile = copyfile, do_not_skip_removed_items = true}
self.db.indexes_pool[cs] = f
iterate_index_table(delete_lines_func_begin, delete_lines_func_i, delete_lines_func_end, args, tag_name)
end
end
function colddb.create_index_table(db,tag_name)
self.open_index_table = function(tag_name)
local t = ""
local name = "æIndex_table"
if tag_name then
t = colddb.get_tag(db,tag_name)
end
local p = string.format("%s%sæIndex_table.cold",db.directory,t)
if not colddb.file_Exists(db,name,tag_name) then
local f = io.open(p, "w")
if f then
f:seek("set")
f:write("0")
f:close()
end
end
local f = io.open(p, "r+")
if f then
f:seek("set")
f:write("0")
db.indexes_pool[string.format("%s%s",t,name)] = {file = f,needs_flushing = false,deleted_items = {},iterating = false}
return true
end
return false
end
function colddb.open_index_table(db,tag_name)
local t = ""
local name = "æIndex_table"
if tag_name then
t = colddb.get_tag(db,tag_name)
t = self.get_tag(tag_name)
end
local cs = string.format("%s%s", t, name)
local fs = db.indexes_pool[cs]
local fs = self.db.indexes_pool[cs]
if not fs then
local p = string.format("%s%sæIndex_table.cold",db.directory,t)
if not colddb.file_Exists(db,name,tag_name) then
local p = string.format("%s%sæIndex_table.cold", self.db.directory, t)
if not self.file_Exists(name,tag_name) then
local f = io.open(p, "w")
if f then
f:seek("set")
@ -306,7 +377,7 @@ function colddb.open_index_table(db,tag_name)
end
local f = io.open(p, "r+")
if f then
db.indexes_pool[cs] = {file = f,needs_flushing = false,deleted_items = {},iterating = false}
self.db.indexes_pool[cs] = {file = f, needs_flushing = false, deleted_items = {}, iterating = false}
return f
end
return nil
@ -316,14 +387,14 @@ function colddb.open_index_table(db,tag_name)
return nil
end
function colddb.append_index_table(db,key,tag_name)
self.append_index_table = function(key, tag_name)
local t = ""
local name = "æIndex_table"
if tag_name then
t = colddb.get_tag(db,tag_name)
t = self.get_tag(tag_name)
end
local cs = string.format("%s%s", t, name)
local f = db.indexes_pool[cs]
local f = self.db.indexes_pool[cs]
local k = type(key)
if f and f.file and k == "string" then
local fl = f.file
@ -331,7 +402,7 @@ function colddb.append_index_table(db,key,tag_name)
fl:flush()
f.needs_flushing = false
end
db.indexes_pool[cs].needs_flushing = true
self.db.indexes_pool[cs].needs_flushing = true
fl:seek("end")
fl:write(string.format("\n%s", key))
fl:seek("set")
@ -339,13 +410,14 @@ function colddb.append_index_table(db,key,tag_name)
count = count + 1
fl:seek("set")
fl:write(string.format("%i", count))
fl:close()
elseif f and f.file then
local fl = f.file
if f.needs_flushing == true then
fl:flush()
f.needs_flushing = false
end
db.indexes_pool[cs].needs_flushing = true
self.db.indexes_pool[cs].needs_flushing = true
local c = 0
for i in pairs(key) do
fl:seek("end")
@ -357,19 +429,20 @@ function colddb.append_index_table(db,key,tag_name)
count = count + c
fl:seek("set")
fl:write(string.format("%i", count))
fl:close()
else
return false
end
end
function colddb.get_count(db,tag_name)
self.get_count = function(tag_name)
local t = ""
local name = "æIndex_table"
if tag_name then
t = colddb.get_tag(db,tag_name)
t = self.get_tag(tag_name)
end
local cs = string.format("%s%s",t,name)
local f = db.indexes_pool[cs]
local f = self.db.indexes_pool[cs]
if f and f.file then
local fl = f.file
if f.needs_flushing == true then
@ -378,65 +451,26 @@ function colddb.get_count(db,tag_name)
end
fl:seek("set")
local count = tonumber(fl:read("*l"))
fl:close()
return count
end
return nil
end
-- Asynchronously walk `count` lines of the open index file keyed by `cs` in
-- db.indexes_pool, calling func_on_iterate(line, i, args) for each line that
-- is not marked deleted. After the pass, end_func(args) runs and any queued
-- iteration request for the same index file is started.
-- NOTE(review): assumes extended_api.Async.iterate drives the first callback
-- with i = 1..count and invokes the second callback once at the end — confirm
-- against the async helper's contract.
-- NOTE(review): `args` is indexed unconditionally below, so callers are
-- presumably expected to pass a table, never nil — verify at call sites.
local function iterate(db,func_on_iterate,end_func,count,cs,args)
	local f = db.indexes_pool[cs]
	local fl = f.file
	extended_api.Async.iterate(db.async_pool,1,count,function(i)
		-- One key per step; the file position advances line by line.
		local line = fl:read("*l")
		-- Skip keys flagged as deleted unless the caller opted out.
		if args.do_not_skip_removed_items or not db.indexes_pool[cs].deleted_items[line] then
			local ar = func_on_iterate(line,i,args)
			-- A non-nil return value replaces the shared args for later steps.
			if ar ~= nil then
				args = ar
				return args
			end
		end
	end,function()
		-- End of pass: report the final args, then service the wait queue.
		if end_func then
			end_func(args)
		end
		if db.iterate_queue[cs] and db.iterate_queue[cs][1] then
			local copy = db.iterate_queue[cs][1]
			-- The pooled file handle may have gone away meanwhile; reopen it.
			f = db.indexes_pool[cs]
			if not f or not f.file then
				colddb.open_index_table(db,copy.tag_name)
				f = db.indexes_pool[cs]
			end
			-- Let the queued request (re)build its args table before starting.
			if copy.begin_func then
				local a = copy.begin_func(copy.args)
				if a and type(a) == "table" then
					copy.args = a
				end
			end
			-- Chain the next pass on the next server step; the `iterating`
			-- flag stays true so no competing pass can start in between.
			minetest.after(0,iterate,copy.db,copy.func_on_iterate,copy.end_func,copy.count,copy.cs,copy.args)
			table.remove(db.iterate_queue[cs],1)
			return false
		else
			db.iterate_queue[cs] = nil
		end
		-- Queue drained: allow future iterations over this index file.
		db.indexes_pool[cs].iterating = false
		return false
	end)
end
function colddb.iterate_index_table(db,begin_func,func_on_iterate,end_func,args,tag_name)
self.iterate_index_table = function(begin_func, func_on_iterate, end_func, args, tag_name)
local t = ""
local name = "æIndex_table"
if tag_name then
t = colddb.get_tag(db,tag_name)
t = self.get_tag(tag_name)
end
local cs = string.format("%s%s", t, name)
local f = db.indexes_pool[cs]
local f = self.db.indexes_pool[cs]
if not f or not f.file then
colddb.open_index_table(db,tag_name)
f = db.indexes_pool[cs]
self.open_index_table(tag_name)
f = self.db.indexes_pool[cs]
end
if f and f.file and db.indexes_pool[cs].iterating == false then
db.indexes_pool[cs].iterating = true
if f and f.file and self.db.indexes_pool[cs].iterating == false then
self.db.indexes_pool[cs].iterating = true
local fl = f.file
if f.needs_flushing == true then
fl:flush()
@ -459,11 +493,11 @@ function colddb.iterate_index_table(db,begin_func,func_on_iterate,end_func,args,
if c < 1 then
-- If theres nothing to index then return
end_func(args)
db.indexes_pool[cs].iterating = false
self.db.indexes_pool[cs].iterating = false
return false
end
-- Start iterating the index table
iterate(db,func_on_iterate,end_func,c,cs,args)
iterate(func_on_iterate, end_func, c, cs, args)
elseif f and f.file then
local fl = f.file
-- If its iterating some other function then add this one to the queue list
@ -473,175 +507,175 @@ function colddb.iterate_index_table(db,begin_func,func_on_iterate,end_func,args,
-- If theres nothing to index then return
return false
end
if not db.iterate_queue[cs] then
db.iterate_queue[cs] = {}
if not self.db.iterate_queue[cs] then
self.db.iterate_queue[cs] = {}
end
local _table = {db=db,begin_func=begin_func,func_on_iterate=func_on_iterate,end_func=end_func,count=c,cs=cs,tag_name=tag_name,args=args}
table.insert(db.iterate_queue[cs],_table)
local _table = {begin_func = begin_func, func_on_iterate = func_on_iterate, end_func = end_func, count = c, cs = cs, tag_name = tag_name, args = args}
table.insert(self.db.iterate_queue[cs], _table)
end
end
local function load_into_mem(db,name,_table,tag_name)
if db.add_to_mem_pool then
self.set = function(name, _table, tag_name)
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
t = self.get_tag(tag_name)
end
if self.db.indexes and not self.file_Exists(name,tag_name) then
self.db.async.queue_task(function()
local cs2 = string.format("%s%s", t , "æIndex_table")
local om = self.db.indexes_pool[cs2]
if not self.file_Exists("æIndex_table", tag_name) or not (om and om.file) then
self.open_index_table(tag_name)
end
self.append_index_table(name, tag_name)
end)
end
self.db.async.queue_task(function()
self.save_table(name, _table, tag_name)
end)
if self.db.add_to_mem_pool then
load_into_mem(name, _table, tag_name)
end
local cs = string.format("%s%s", t, name)
if not db.mem_pool[cs] then
db.mem_pool[cs] = {mem = _table,indexes = db.indexes}
else
db.mem_pool[cs].mem = _table
db.mem_pool[cs].indexes = db.indexes
end
end
self.db.mem_pool_del[cs] = nil
end
function colddb.set(db,name,_table,tag_name)
self.set_key = function(name, tag_name)
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
t = self.get_tag(tag_name)
end
if db.indexes and not colddb.file_Exists(db,name,tag_name) then
extended_api.Async.queue_task(db.async_pool,function()
if self.db.indexes and not self.file_Exists(name, tag_name) then
self.db.async.queue_task(function()
local cs2 = string.format("%s%s", t, "æIndex_table")
local om = db.indexes_pool[cs2]
if not colddb.file_Exists(db,"æIndex_table",tag_name) or not (om and om.file) then
colddb.open_index_table(db,tag_name)
local om = self.db.indexes_pool[cs2]
if not self.file_Exists("æIndex_table", tag_name) or not (om and om.file) then
self.open_index_table(tag_name)
end
colddb.append_index_table(db,name,tag_name)
self.append_index_table(name, tag_name)
end)
end
extended_api.Async.queue_task(db.async_pool,function()
colddb.save_table(db,name, _table,tag_name)
self.db.async.queue_task(function()
self.save_key(name, tag_name)
end)
if db.add_to_mem_pool then
load_into_mem(db,name,_table,tag_name)
if self.db.add_to_mem_pool then
load_into_mem(name, "", tag_name)
end
local cs = string.format("%s%s", t, name)
self.db.mem_pool_del[cs] = nil
end
function colddb.set_key(db,name,tag_name)
self.get = function(name, tag_name, callback)
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
t = self.get_tag(tag_name)
end
if db.indexes and not colddb.file_Exists(db,name,tag_name) then
extended_api.Async.queue_task(db.async_pool,function()
local cs2 = string.format("%s%s",t,"æIndex_table")
local om = db.indexes_pool[cs2]
if not colddb.file_Exists(db,"æIndex_table",tag_name) or not (om and om.file) then
colddb.open_index_table(db,tag_name)
end
colddb.append_index_table(db,name,tag_name)
end)
end
extended_api.Async.queue_task(db.async_pool,function()
colddb.save_key(db,name, tag_name)
end)
if db.add_to_mem_pool then
load_into_mem(db,name,"",tag_name)
end
end
function colddb.get(db,name,tag_name,callback)
local cs = string.format("%s%s", t, name)
if self.db.mem_pool_del[cs] then
if callback then
extended_api.Async.queue_task(db.async_pool,function()
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
callback(nil)
end
local cs = string.format("%s%s",t,name)
local pm = db.mem_pool[cs]
return nil
end
if callback then
self.db.async.queue_task(function()
local pm = self.db.mem_pool[cs]
if pm then
return pm.mem
else
local _table = colddb.load_table(db,name,tag_name)
local _table = self.load_table(name, tag_name)
if _table then
load_into_mem(db,name,_table,tag_name)
load_into_mem(name, _table, tag_name)
return _table
end
end
self.db.mem_pool_del[cs] = true
return nil
end,callback)
else
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
end
local cs = string.format("%s%s",t,name)
local pm = db.mem_pool[cs]
local pm = self.db.mem_pool[cs]
if pm then
return pm.mem
else
local _table = colddb.load_table(db,name,tag_name)
local _table = self.load_table(name, tag_name)
if _table then
load_into_mem(db,name,_table,tag_name)
load_into_mem(name, _table, tag_name)
return _table
end
end
self.db.mem_pool_del[cs] = true
return nil
end
end
function colddb.get_key(db,name,tag_name,callback)
if callback then
extended_api.Async.queue_task(db.async_pool,function()
self.get_key = function(name, tag_name, callback)
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
t = self.get_tag(tag_name)
end
local cs = string.format("%s%s", t, name)
local pm = db.mem_pool[cs]
if self.db.mem_pool_del[cs] then
if callback then
callback(false)
end
return false
end
if callback then
self.db.async.queue_task(function()
local t = ""
if tag_name then
t = self.get_tag(tag_name)
end
local pm = self.db.mem_pool[cs]
if pm then
return true
else
local bool = colddb.load_key(db,name,tag_name)
local bool = self.load_key(name, tag_name)
if bool then
load_into_mem(db,name,bool,tag_name)
load_into_mem(name, bool, tag_name)
return bool
end
end
self.db.mem_pool_del[cs] = true
return nil
end,callback)
else
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
end
local cs = string.format("%s%s",t,name)
local pm = db.mem_pool[cs]
local pm = self.db.mem_pool[cs]
if pm then
return true
else
local bool = colddb.load_key(db,name,tag_name)
local bool = self.load_key(name, tag_name)
if bool then
load_into_mem(db,name,bool,tag_name)
load_into_mem(name, bool, tag_name)
return bool
end
end
self.db.mem_pool_del[cs] = true
return nil
end
end
function colddb.remove(db,name,tag_name)
self.remove = function(name, tag_name)
local t = ""
if tag_name then
t = colddb.get_tag(db,tag_name)
t = self.get_tag(tag_name)
end
local cs = string.format("%s%s", t, name)
if db.mem_pool[cs] then
db.mem_pool[cs] = nil
end
if db.indexes and colddb.file_Exists(db,"æIndex_table",tag_name) then
extended_api.Async.queue_task(db.async_pool,function()
self.db.mem_pool[cs] = nil
self.db.mem_pool_del[cs] = true
if self.db.indexes and self.file_Exists("æIndex_table", tag_name) then
self.db.async.queue_task(function()
local cs2 = string.format("%s%s",t,"æIndex_table")
if not (db.indexes_pool[cs2] and db.indexes_pool[cs2].file) then
colddb.open_index_table(db,tag_name)
if not (self.db.indexes_pool[cs2] and self.db.indexes_pool[cs2].file) then
self.open_index_table(tag_name)
end
colddb.delete_lines(db,name,tag_name)
self.delete_lines(name, tag_name)
end)
end
extended_api.Async.queue_task(db.async_pool,function()
colddb.delete_file(db,name,tag_name)
self.db.async.queue_task(function()
self.delete_file(name, tag_name)
end)
end
return self
end