4 Commits

SHA1 Message Date
dd396bfad2 Upload C# source code, raw 2015-09-25 22:46:42 +02:00
f389e6bd13 More efficient python script
- Fixed pipelining
- Cleaning everything up
- Don't re-download saved characters
- Add the media to .gitignore

About pipelining:

According to Python's http.client:
1) you send a request
2) you MUST get response headers for (1) (THIS IS MANDATORY)
3) you send another request
4) you get response body for (2)
5) response headers for (3)
6) response body for (5)

Only two requests can be pipelined that way. Surely that's an unavoidable limitation... wait, no, it's just written into the code to error out if you don't do it that way.
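
That mandated sequence, as a minimal sketch (placeholder host and paths, no error handling):

from http.client import HTTPConnection

c = HTTPConnection("example.com")   # placeholder host
c.request("GET", "/a")              # (1) send a request
r1 = c.getresponse()                # (2) headers for (1), the mandatory part
c.request("GET", "/b")              # (3) only now may another request go out
body1 = r1.read()                   # (4) body of the first response
r2 = c.getresponse()                # (5) headers for (3)
body2 = r2.read()                   # (6) body of the second response

Skip step (2) and request() raises CannotSendRequest; that is the two-request ceiling.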

According to reality:
1) you send a request
2) you do not get response headers for (1)
3) you repeat steps 1-2 until enough responses are queued
4) you receive those responses as header,body,header,body...

They even name the state field with a __ prefix so it's hard to override, but the state can safely go to Idle once a request has been sent, whether or not the response headers have come in. Sure, the connection might close, but then you adjust by not pipelining, and re-send the rest of your requests over a new connection.
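
Reduced to a minimal sketch (the name pipelined_get is mine; no reconnect or retry logic), the workaround looks like this:

from http.client import HTTPConnection, _CS_IDLE

def pipelined_get(host, urls):
    c = HTTPConnection(host)
    responses = []
    for url in urls:
        c.request("GET", url)
        # force the name-mangled state back to Idle so the next
        # request can go out before any response has been read
        c._HTTPConnection__state = _CS_IDLE
        responses.append(c.response_class(c.sock, method="GET"))
    for r in responses:
        r.begin()        # headers come in only now
        yield r.read()   # then the body: header,body,header,body...

That is what trysend/trydrain in the new script do, plus the reconnect-and-retry handling.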
2015-09-25 22:43:54 +02:00
e762283dec Use simple_skins if default selected 2015-04-11 15:27:41 +02:00
5cb484e251 Add tooltips 2014-11-23 20:00:52 +01:00
5 changed files with 382 additions and 50 deletions

.gitignore vendored Normal file

@@ -0,0 +1,2 @@
character_*.png
character_*.txt

MT_skins_updater.cs Normal file

@@ -0,0 +1,200 @@
using System;
//Json.NET library (http://json.codeplex.com/)
using Newtonsoft.Json;
using System.Collections.Generic;
using System.Net;
using System.IO;

// MT skins updater for the u_skins mod
// Creator: Krock
// License: zlib (http://www.zlib.net/zlib_license.html)
namespace MT_skins_updater {
    class Program {
        static void Main(string[] args) {
            Console.WriteLine("Welcome to the MT skins updater!");
            Console.WriteLine("# Created by: Krock (2014-07-10)");
            Engine e = new Engine();
            Console.WriteLine(@"Path to the u_skins mod: (ex. 'E:\Minetest\mods\u_skinsdb\u_skins\')");
            string path = Console.ReadLine();
            Console.WriteLine("Start updating at page: ('0' to update everything)");
            int page = getInt(Console.ReadLine());
            e.Start(path, page);
            Console.WriteLine("Press any key to exit.");
            Console.ReadKey(false);
        }
        public static int getInt(string i) {
            int ret = 0;
            int.TryParse(i, out ret);
            return (ret > 0) ? ret : 0;
        }
    }
    class Engine {
        string root = "http://minetest.fensta.bplaced.net";
        bool alternate = true; // should it use the special version of metadata saving?

        public void Start(string path, int page) {
            if (path.Length < 5) {
                Console.WriteLine("Too short path. STOP.");
                return;
            }
            if (path[path.Length - 1] != '\\') {
                path += '\\';
            }
            if (!Directory.Exists(path + "meta")) {
                Console.WriteLine("Folder 'meta' not found. STOP.");
                return;
            }
            if (!Directory.Exists(path + "textures")) {
                Console.WriteLine("Folder 'textures' not found. STOP.");
                return;
            }
            WebClient cli = new WebClient();
            //add useragent to identify
            cli.Headers.Add("User-Agent", "MT_skin_grabber 1.1");
            bool firstSkin = true;
            List<string> skin_local = new List<string>();
            int pages = page,
                updated = 0;
            for (; page <= pages; page++) {
                string contents = "";
                try {
                    contents = cli.DownloadString(root + "/api/get.json.php?getlist&page=" + page);
                } catch (WebException e) {
                    Console.WriteLine("Whoops! Error at page ID: " + page + ". WebClient says: " + e.Message);
                    Console.WriteLine("Press any key to skip this page.");
                    Console.ReadKey(false);
                    continue;
                }
                Data o = JsonConvert.DeserializeObject<Data>(contents);
                if (o.pages != pages) {
                    pages = o.pages;
                }
                Console.WriteLine("# Page " + page + " (" + o.per_page + " skins)");
                for (int i = 0; i < o.skins.Length; i++) {
                    int id = o.skins[i].id;
                    if (o.skins[i].type != "image/png") {
                        Console.WriteLine("Image type '" + o.skins[i].type + "' not supported at skin ID: " + id);
                        Console.WriteLine("Press any key to continue.");
                        Console.ReadKey(false);
                        continue;
                    }
                    //eliminate special chars!
                    o.skins[i].name = WebUtility.HtmlDecode(o.skins[i].name);
                    o.skins[i].author = WebUtility.HtmlDecode(o.skins[i].author);
                    //to delete old, removed skins
                    if (firstSkin) {
                        firstSkin = false;
                        string[] files = Directory.GetFiles(path + "textures\\");
                        for (int f = 0; f < files.Length; f++) {
                            string[] filePath = stringSplitLast(files[f], '\\'),
                                fileName = stringSplitLast(filePath[1], '.'),
                                fileVer = stringSplitLast(fileName[0], '_');
                            if (fileVer[1] == "" || fileVer[0] != "character") continue;
                            int skinNr = Program.getInt(fileVer[1]);
                            if (skinNr <= id) continue;
                            skin_local.Add(fileName[0]);
                        }
                    } else skin_local.Remove("character_" + id);
                    //get file size, only override changed
                    FileInfo localImg = new FileInfo(path + "textures\\character_" + id + ".png");
                    byte[] imageData = Convert.FromBase64String(o.skins[i].img);
                    bool isDif = true;
                    if (localImg.Exists) isDif = (Math.Abs(imageData.Length - localImg.Length) >= 3);
                    if (isDif) {
                        File.WriteAllBytes(localImg.FullName, imageData);
                        imageData = null;
                        //previews
                        try {
                            cli.DownloadFile(root + "/skins/1/" + id + ".png", path + "textures\\character_" + id + "_preview.png");
                        } catch (WebException e) {
                            Console.WriteLine("Whoops! Error at skin ID: " + id + ". WebClient says: " + e.Message);
                            Console.WriteLine("Press any key to continue.");
                            Console.ReadKey(false);
                        }
                    } else {
                        Console.WriteLine("[SKIP] character_" + id);
                        continue;
                    }
                    string meta = "";
                    if (!alternate) {
                        meta = "name = \"" + o.skins[i].name + "\",\n";
                        meta += "author = \"" + o.skins[i].author + "\",\n";
                        meta += "comment = \"" + o.skins[i].license + '"';
                    } else {
                        meta = o.skins[i].name + '\n' + o.skins[i].author + '\n' + o.skins[i].license;
                    }
                    File.WriteAllText(path + "meta\\character_" + id + ".txt", meta);
                    updated++;
                    Console.WriteLine("[" + id + "] " + shorten(o.skins[i].name, 20) + "\t by: " + o.skins[i].author + "\t (" + o.skins[i].license + ")");
                }
            }
            foreach (string fileName in skin_local) {
                if (File.Exists(path + "textures\\" + fileName + ".png")) {
                    File.Delete(path + "textures\\" + fileName + ".png");
                }
                if (File.Exists(path + "textures\\" + fileName + "_preview.png")) {
                    File.Delete(path + "textures\\" + fileName + "_preview.png");
                }
                if (File.Exists(path + "meta\\" + fileName + ".txt")) {
                    File.Delete(path + "meta\\" + fileName + ".txt");
                }
                Console.WriteLine("[DEL] " + fileName + " (deleted skin)");
            }
            Console.WriteLine("Done. Updated " + updated + " skins!");
        }
        string shorten(string inp, int len) {
            char[] shr = new char[len];
            for (int i = 0; i < len; i++) {
                if (i < inp.Length) {
                    shr[i] = inp[i];
                } else shr[i] = ' ';
            }
            return new string(shr);
        }
        // splits at the last occurrence of 'limiter'; returns { head, tail }
        string[] stringSplitLast(string path, char limiter) {
            int found = 0;
            int totalLen = path.Length - 1;
            for (int i = totalLen; i >= 0; i--) {
                if (path[i] == limiter) {
                    found = i;
                    break;
                }
            }
            if (found == 0) {
                return new string[] { "", "" };
            }
            int len = totalLen - found;
            char[] str_1 = new char[found],
                str_2 = new char[len];
            for (int i = 0; i < path.Length; i++) {
                if (i == found) continue;
                if (i < found) {
                    str_1[i] = path[i];
                } else {
                    str_2[i - found - 1] = path[i];
                }
            }
            return new string[] { new string(str_1), new string(str_2) };
        }
    }
    class Data {
        public Skins_data[] skins;
        public int page, pages, per_page;
    }
    class Skins_data {
        public string name, author, uploaded, type, license, img;
        public int id, license_id;
    }
}

depends.txt

@@ -1,2 +1,3 @@
 unified_inventory
 default
+simple_skins?

init.lua

@@ -9,6 +9,7 @@ u_skins.default = "character_1"
 u_skins.pages = {}
 u_skins.u_skins = {}
 u_skins.file_save = false
+u_skins.simple_skins = false
 
 -- ( Deprecated
 u_skins.type = { SPRITE=0, MODEL=1, ERROR=99 }
@@ -33,15 +34,22 @@ end
 dofile(u_skins.modpath.."/skinlist.lua")
 dofile(u_skins.modpath.."/players.lua")
 
+if rawget(_G, "skins") then
+	u_skins.simple_skins = true
+end
+
 u_skins.update_player_skin = function(player)
 	local name = player:get_player_name()
+	if u_skins.simple_skins and u_skins.u_skins[name] == u_skins.default then
+		return
+	end
 	if not u_skins.is_skin(u_skins.u_skins[name]) then
 		u_skins.u_skins[name] = u_skins.default
 	end
 	player:set_properties({
 		textures = {u_skins.u_skins[name]..".png"},
 	})
-	u_skins.file_save = true
 end
 
 -- Display Current Skin
@@ -109,8 +117,9 @@ u_skins.generate_pages = function(texture)
 		if i > 1 and x == 0 then
 			y = 1.8
 		end
-		formspec = (formspec.."image_button["..x..","..y..";1,2;"
-			..skin[2].."_preview.png;u_skins_set$"..skin[1]..";]")
+		formspec = (formspec.."image_button["..x..","..y..";1,2;"..
+			skin[2].."_preview.png;u_skins_set$"..skin[1]..";]"..
+			"tooltip[u_skins_set$"..skin[1]..";"..u_skins.meta[skin[2]].name.."]")
 	end
 	local page_prev = page - 2
 	local page_next = page
@@ -144,6 +153,7 @@ minetest.register_on_player_receive_fields(function(player, formname, fields)
 	if current[1] == "u_skins_set" then
 		u_skins.u_skins[player:get_player_name()] = u_skins.list[tonumber(current[2])]
 		u_skins.update_player_skin(player)
+		u_skins.file_save = true
 		unified_inventory.set_inventory_formspec(player, "u_skins")
 	elseif current[1] == "u_skins_page" then
 		u_skins.pages[player:get_player_name()] = current[2]

(Python update script)

@@ -1,61 +1,180 @@
 #!/usr/bin/python3
-from http.client import HTTPConnection
+from http.client import HTTPConnection,HTTPException,BadStatusLine,_CS_IDLE
 import json
 import base64
+from contextlib import closing
+import sys,os,shutil,time
+
+def die(message,code=23):
+    print(message,file=sys.stderr)
+    raise SystemExit(code)
 
 server = "minetest.fensta.bplaced.net"
 skinsdir = "u_skins/textures/"
 metadir = "u_skins/meta/"
 
-i = 1
-pages = 1
-
-c = HTTPConnection(server)
-
-def addpage(page):
-    global i, pages
-    print("Page: " + str(page))
-    r = 0
-    try:
-        c.request("GET", "/api/get.json.php?getlist&page=" + str(page) + "&outformat=base64")
-        r = c.getresponse()
-    except Exception:
-        if r != 0:
-            if r.status != 200:
-                print("Error", r.status)
-                exit(r.status)
-        return
-
-    data = r.read().decode()
-    l = json.loads(data)
-    if not l["success"]:
-        print("Success != True")
-        exit(1)
-    r = 0
-    pages = int(l["pages"])
-    for s in l["skins"]:
-        f = open(skinsdir + "character_" + str(i) + ".png", "wb")
-        f.write(base64.b64decode(bytes(s["img"], 'utf-8')))
-        f.close()
-        f = open(metadir + "character_" + str(i) + ".txt", "w")
-        f.write(str(s["name"]) + '\n')
-        f.write(str(s["author"]) + '\n')
-        f.write(str(s["license"]))
-        f.close()
-        try:
-            c.request("GET", "/skins/1/" + str(s["id"]) + ".png")
-            r = c.getresponse()
-        except Exception:
-            if r != 0:
-                if r.status != 200:
-                    print("Error", r.status)
-            continue
-        data = r.read()
-        f = open(skinsdir + "character_" + str(i) + "_preview.png", "wb")
-        f.write(data)
-        f.close()
-        i = i + 1
-
-addpage(1)
-if pages > 1:
-    for p in range(pages-1):
-        addpage(p+2)
-print("Skins have been updated!")
+curskin = 0
+curpage = 1
+pages = None
+
+def replace(location,base,encoding=None,path=None):
+    if path is None:
+        path = os.path.join(location,base)
+    mode = "wt" if encoding else "wb"
+    # an unpredictable temp name only needed for a+rwxt directories
+    tmp = os.path.join(location,'.'+base+'-tmp')
+    def deco(handle):
+        with open(tmp,mode,encoding=encoding) as out:
+            handle(out)
+        os.rename(tmp,path)
+    return deco
+
+def maybeReplace(location,base,encoding=None):
+    def deco(handle):
+        path = os.path.join(location,base)
+        if os.path.exists(path): return
+        return replace(location,base,encoding=encoding,path=path)(handle)
+    return deco
+
+class Penguin:
+    "idk"
+    def __init__(self, url, recv, diemessage):
+        self.url = url
+        self.recv = recv
+        self.diemessage = diemessage
+
+class Pipeline(list):
+    "Gawd why am I being so elaborate?"
+    def __init__(self, threshold=10):
+        "threshold is how many requests in parallel to pipeline"
+        self.threshold = threshold
+        self.sent = True
+    def __enter__(self):
+        self.reopen()
+        return self
+    def __exit__(self,typ,exn,trace):
+        self.send()
+        self.drain()
+    def reopen(self):
+        self.c = HTTPConnection(server)
+        self.send()
+    def append(self,url,recv,diemessage):
+        self.sent = False
+        super().append(Penguin(url,recv,diemessage))
+        if len(self) > self.threshold:
+            self.send()
+            self.drain()
+    def trydrain(self):
+        for penguin in self:
+            print('drain',penguin.url)
+            try:
+                penguin.response.begin()
+                penguin.recv(penguin.response)
+            except BadStatusLine as e:
+                print('derped requesting',penguin.url)
+                return False
+            except HTTPException as e:
+                die(penguin.diemessage+' '+repr(e)+' (url='+penguin.url+')')
+        self.clear()
+        return True
+    def drain(self):
+        print('draining pipeline...',len(self))
+        assert self.sent, "Can't drain without sending the requests!"
+        self.sent = False
+        while self.trydrain() is not True:
+            self.c.close()
+            print('drain failed, trying again')
+            time.sleep(1)
+            self.reopen()
+    def trysend(self):
+        for penguin in pipeline:
+            print('fill',penguin.url)
+            try:
+                self.c.request("GET", penguin.url)
+                self.c._HTTPConnection__state = _CS_IDLE
+                penguin.response = self.c.response_class(self.c.sock,
+                    method="GET")
+                # begin LATER so we can send multiple requests w/out response headers
+            except BadStatusLine:
+                return False
+            except HTTPException as e:
+                die(diemessage+' because of a '+repr(e))
+        return True
+    def send(self):
+        if self.sent: return
+        print('filling pipeline...',len(self))
+        while self.trysend() is not True:
+            self.c.close()
+            print('derped resending')
+            time.sleep(1)
+            self.reopen()
+        self.sent = True
+
+with Pipeline() as pipeline:
+    # two connections is okay, right? one for json, one for preview images
+    c = HTTPConnection(server)
+    def addpage(page):
+        global curskin, pages
+        print("Page: " + str(page))
+        r = 0
+        try:
+            c.request("GET", "/api/get.json.php?getlist&page=" + str(page) + "&outformat=base64")
+            r = c.getresponse()
+        except Exception:
+            if r != 0:
+                if r.status != 200:
+                    die("Error", r.status)
+            return
+        data = r.read().decode()
+        l = json.loads(data)
+        if not l["success"]:
+            die("Success != True")
+        r = 0
+        pages = int(l["pages"])
+        foundOne = False
+        for s in l["skins"]:
+            # make sure to increment this, even if the preview exists!
+            curskin = curskin + 1
+            previewbase = "character_" + str(curskin) + "_preview.png"
+            preview = os.path.join(skinsdir, previewbase)
+            if os.path.exists(preview):
+                print('skin',curskin,'already retrieved')
+                continue
+            print('updating skin',curskin,'id',s["id"])
+            foundOne = True
+            @maybeReplace(skinsdir, "character_" + str(curskin) + ".png")
+            def go(f):
+                f.write(base64.b64decode(bytes(s["img"], 'utf-8')))
+                f.close()
+            @maybeReplace(metadir, "character_" + str(curskin) + ".txt",
+                encoding='utf-8')
+            def go(f):
+                f.write(str(s["name"]) + '\n')
+                f.write(str(s["author"]) + '\n')
+                f.write(str(s["license"]))
+            url = "/skins/1/" + str(s["id"]) + ".png"
+            def closure(skinsdir,previewbase,preview,s):
+                "explanation: python sucks"
+                def tryget(r):
+                    print('replacing',s["id"])
+                    if r.status != 200:
+                        print("Error", r.status)
+                        return
+                    @replace(skinsdir,previewbase,path=preview)
+                    def go(f):
+                        shutil.copyfileobj(r,f)
+                return tryget
+            pipeline.append(url,closure(skinsdir,previewbase,preview,s),
+                "Couldn't get {} because of a".format(
+                    s["id"]))
+        if not foundOne:
+            print("No skins updated on this page. Seems we're done?")
+            #raise SystemExit
+    addpage(curpage)
+    while pages > curpage:
+        curpage = curpage + 1
+        addpage(curpage)
+    print("Skins have been updated!")