2021-03-23 17:36:43 -04:00
|
|
|
import json, logging, re, time, urllib.request, urllib.parse, urllib.error, zipfile
|
2011-09-07 22:33:26 -04:00
|
|
|
try:
|
|
|
|
import tarfile
|
|
|
|
except:
|
|
|
|
tarfile = None
|
2011-08-16 03:36:04 -04:00
|
|
|
import os, sys, shutil
|
2017-03-07 22:28:15 -05:00
|
|
|
try:
|
|
|
|
from pnc.attrdict import AttrDict
|
|
|
|
except ImportError:
|
|
|
|
# Fall back on the old location - just in case
|
|
|
|
from pnc.dep.attrdict import AttrDict
|
2011-05-26 03:42:05 -04:00
|
|
|
|
2017-01-25 10:51:58 -05:00
|
|
|
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)


# Broad category of user; selects which update channel applies.
USER_TYPE = "user"
# user - for normal people
# beta - for the original beta testers
# dev - used to be for git users, now it's anyone with the 3.41 beta
# edge - new git stuff. bleeding edge, do not try at home (kiooeht version)

# How this copy was installed; determines which update package format fits.
INSTALL_TYPE = "installer"
# installer - Windows/Mac installer (exe/dmg)
# zip - Windows zip (zip)
# source - Win/Linux/Mac source code (zip/tar)

# Platform identifier, normalized below to "win32", "linux", or "mac".
OS_TYPE = sys.platform # win32, linux, darwin
if OS_TYPE.startswith("linux"):
    # Covers "linux2" etc. reported by older Pythons.
    OS_TYPE = "linux"
elif OS_TYPE == "darwin":
    OS_TYPE = "mac"


# These will eventually be phased out
# Hard-coded fallback version fields, used when VERSION.js can't be read
# (see get_pchum_ver).
_pcMajor = "3.41"
_pcMinor = "4"
_pcStatus = "A" # A = alpha
                # B = beta
                # RC = release candidate
                # None = public release
_pcRevision = "13"
# Cached formatted version string; populated by pcVerCalc().
_pcVersion = ""

# Remote VERSION.js used for update checks (see is_outdated).
_updateCheckURL = "https://github.com/karxi/pesterchum/raw/master/VERSION.js"
# Archive downloaded by updatePesterchum() when updating.
_downloadURL = "https://github.com/karxi/pesterchum/archive/master.zip"

# Shared JSON decoder used to parse VERSION.js.
jsodeco = json.JSONDecoder()

# Whether or not we've completed an update (requires a restart).
has_updated = False
|
|
|
|
|
2017-03-07 22:28:15 -05:00
|
|
|
def _py_version_check():
|
2021-03-23 17:36:43 -04:00
|
|
|
pass
|
|
|
|
# We are using python 3 rn :)
|
2017-03-07 22:28:15 -05:00
|
|
|
|
2017-01-21 21:14:21 -05:00
|
|
|
# Not 100% finished - certain output formats seem odd
|
|
|
|
# Not 100% finished - certain output formats seem odd
def get_pchum_ver(raw=0, pretty=False, file=None, use_hard_coded=None):
    """Return Pesterchum's version, parsed from VERSION.js when possible.

    raw=0 returns a formatted string; raw=1 a (major, minor, status, rev,
    utype) tuple; raw>1 the AttrDict itself.
    pretty selects the string flavor; pretty > 1 gives a long display form.
    file, if given, is an open file object read in place of VERSION.js
    (closing it is left to the caller).
    If use_hard_coded is None, we don't care. If it's False, we won't use
    the hard-coded module fallbacks; if True, we always use them.
    """
    getrawlines = lambda fobj: [x.strip() for x in fobj.readlines()]
    if file:
        # Don't fall back onto defaults if we were given a file.
        use_hard_coded = False

    try:
        if use_hard_coded:
            # This is messy code, but we just want it to work for now:
            # jump straight to the fallback via the except clause.
            raise ValueError

        if file:
            # Leave closing this to the caller.
            raw_ver = getrawlines(file)
        else:
            # Open our default file ourselves.
            with open("VERSION.js", 'r') as fo:
                raw_ver = getrawlines(fo)
        raw_ver = ' '.join(raw_ver)
        # Now that we have the actual version, we can just set everything
        # up neatly.
        ver = jsodeco.decode(raw_ver)
        # BUGFIX: the old code encoded each key with k.encode('ascii'),
        # which on Python 3 yields *bytes* keys, so ver.status below raised
        # and the parsed file was always thrown away. JSON keys are already
        # str on Python 3; no conversion is needed.
        ver = AttrDict(ver)
        # Do a bit of compensation for the unicode part of JSON.
        ver.status, ver.utype = str(ver.status), str(ver.utype)
    except Exception:
        if use_hard_coded == False:
            # We refuse to use the hard-coded values, period.
            raise

        global _pcMajor, _pcMinor, _pcStatus, _pcRevision, USER_TYPE
        ver = AttrDict({
            "major": _pcMajor, "minor": _pcMinor,
            "status": _pcStatus, "rev": _pcRevision,
            "utype": USER_TYPE
        })

    # Normalize the field types for comparison/formatting.
    ver.major = float(ver.major)
    ver.minor = int(ver.minor)
    if not ver.status:
        ver.status = None
    ver.rev = int(ver.rev)
    if raw:
        if raw > 1:
            # Give the AttrDict.
            return ver
        else:
            # Give a tuple.
            return (ver.major, ver.minor, ver.status, ver.rev, ver.utype)
    # Compose the version information into a string.
    # We usually specify the format for this pretty strictly.
    # We want it to look like "3.14.01-A07", for example.
    elif pretty:
        if pretty > True:
            # True == 1; we get here if pretty is greater than 1
            if ver.utype == "edge":
                # If this is an edge build, the other types don't really
                # matter.
                ver.status = "Bleeding Edge"
            else:
                statuses = {
                    # These are slightly unnecessary, but....
                    "A": "Alpha",
                    "B": "Beta",
                    "RC": "Release Candidate"
                }
                # Pick a status or don't give one.
                ver.status = statuses.get(ver.status, "")
                if ver.status:
                    ver.status = " " + ver.status
            # Not the same as the original output, but it seems nicer.
            retval = "{major:.2f}.{minor:02d}{status!s} {rev:02d}"
        else:
            retval = "{major:.2f}.{minor:02d}-r{rev:02d}{status!s} ({utype!s})"
    elif ver.status:
        retval = "{major:.2f}.{minor:02d}-{status!s}{rev:02d}"
    else:
        retval = "{major:.2f}.{minor:02d}.{rev:02d}"
    return retval.format(**ver)
|
|
|
|
|
2011-05-26 03:40:30 -04:00
|
|
|
def pcVerCalc():
    """Refresh the cached module-level _pcVersion string.

    The formatting logic itself lives in get_pchum_ver; this is just a
    convenience wrapper kept for ease of use.
    """
    global _pcVersion
    _pcVersion = get_pchum_ver(raw=False)
|
|
|
|
|
2011-05-26 03:42:05 -04:00
|
|
|
|
2011-07-10 07:54:04 -04:00
|
|
|
def lexVersion(short=False):
    """Build a version string from the module-level version fields.

    With short=True, returns the compact form (e.g. "3.41.4A13E");
    otherwise the long form (e.g. "3.41.4 Alpha 13 Bleeding Edge").
    Public releases (no status) are just "major.minor".
    """
    if not _pcStatus:
        return "%s.%s" % (_pcMajor, _pcMinor)

    # Edge builds get an extra marker.
    edge_tag = "E" if USER_TYPE == "edge" else ""

    if short:
        return "%s.%s%s%s%s" % (
            _pcMajor, _pcMinor, _pcStatus, _pcRevision, edge_tag)

    # Spell the status code out for the long form; unknown codes get "".
    stype = {
        "A": "Alpha",
        "B": "Beta",
        "RC": "Release Candidate",
    }.get(_pcStatus, "")

    edge_suffix = " Bleeding Edge" if edge_tag == "E" else ""

    return "%s.%s %s %s%s" % (_pcMajor, _pcMinor, stype, _pcRevision, edge_suffix)
|
|
|
|
|
|
|
|
# Naughty I know, but it lets me grab it from the bash script.
if __name__ == "__main__":
    # Print the long-form version string so shell scripts can capture it.
    print(lexVersion())
|
2011-07-10 07:54:04 -04:00
|
|
|
|
2011-05-26 03:42:05 -04:00
|
|
|
def verStrToNum(ver):
    """Parse a version string like "3.41.4-A13:http://..." into its parts.

    Returns a (full, major, minor, status, revision, url) tuple of strings,
    where full is everything before the first ":". On a parse failure,
    prints a notice and returns None.
    """
    # Raw string literal: the old "\d..." form is an invalid escape
    # sequence on modern Python 3.
    w = re.match(r"(\d+\.?\d+)\.(\d+)-?([A-Za-z]{0,2})\.?(\d*):(\S+)", ver)
    if not w:
        print("Update check Failure: 3")
        return
    # Everything up to the URL separator.
    full = ver[:ver.find(":")]
    return full, w.group(1), w.group(2), w.group(3), w.group(4), w.group(5)
|
|
|
|
|
2017-01-21 21:14:21 -05:00
|
|
|
def is_outdated(url=None):
    """Return True when the remote VERSION.js reports a newer version.

    url defaults to the module-level _updateCheckURL. Any fetch/parse
    error propagates to the caller.
    """
    if not url:
        global _updateCheckURL
        url = _updateCheckURL

    # karxi: Do we really need to sleep here? Why?
    time.sleep(3)
    jsfile = None
    try:
        # BUGFIX: fetch the URL we were actually given; the old code always
        # opened _updateCheckURL, silently ignoring the url argument.
        jsfile = urllib.request.urlopen(url)
        gitver = get_pchum_ver(raw=2, file=jsfile)
    except:
        # No error handling yet....
        raise
    finally:
        # Guard: jsfile stays None when urlopen itself failed, in which
        # case the old unconditional close() raised NameError.
        if jsfile is not None:
            jsfile.close()
    ourver = get_pchum_ver(raw=2)

    # Now we can compare.
    outdated = False
    # What, if anything, tipped us off
    trigger = None
    keys = ("major", "minor", "rev", "status")
    for k in keys:
        # NOTE(review): "status" may be None on public releases; comparing
        # None against a str raises TypeError on Python 3 — confirm
        # upstream always supplies a status before relying on this.
        if gitver[k] > ourver[k]:
            # We don't test for 'bleeding edge' just yet.
            trigger = k
            outdated = True
    if outdated:
        logger.info(
            "Out of date (newer is {0!r} {1} to our {2})".format(
                trigger, gitver[trigger], ourver[trigger]))
    return outdated
    # So now all that's left to do is to set up the actual downloading of
    # updates...or at least a notifier, until it can be automated.
|
|
|
|
# So now all that's left to do is to set up the actual downloading of
|
|
|
|
# updates...or at least a notifier, until it can be automated.
|
|
|
|
|
2017-01-25 10:51:58 -05:00
|
|
|
def updatePesterchum(url=None):
    """Download an update archive and copy its contents over this install.

    url defaults to the module-level _downloadURL.
    TODO: This is still WIP; the actual copying needs to be adjusted.
    """
    if url is None:
        global _downloadURL
        url = _downloadURL

    try:
        # Try to fetch the update.
        fn, fninfo = urllib.request.urlretrieve(url)
    except urllib.error.ContentTooShortError:
        # Our download was interrupted; there's not really anything we can
        # do here.
        raise

    # BUGFIX: splitext() returns (root, ext); the old code compared the
    # whole tuple against ".zip", which could never match. (It also used
    # "osp", which was never imported.)
    ext = os.path.splitext(fn)[1]

    if ext == ".zip":
        # zipfile is already imported at module level.
        is_updatefile = zipfile.is_zipfile
        openupdate = zipfile.ZipFile
    elif tarfile and ext.startswith(".tar"):
        # tarfile may be None if its import failed at module level.
        is_updatefile = tarfile.is_tarfile
        openupdate = tarfile.open
    else:
        logger.info("No handler available for update {0!r}".format(fn))
        return
    logger.info("Opening update {0!s} {1!r} ...".format(ext, fn))

    if is_updatefile(fn):
        update = openupdate(fn, 'r')
        tmpfldr, updfldr = "tmp", "update"

        # Set up the folder structure.
        if os.path.exists(updfldr):
            # We'll need this later.
            shutil.rmtree(updfldr)
        if os.path.exists(tmpfldr):
            shutil.rmtree(tmpfldr)
        os.mkdir(tmpfldr)
        update.extractall(tmpfldr)
        # Close the archive; the old code leaked the handle.
        update.close()
        contents = os.listdir(tmpfldr)

        # Is there only one folder here? Git likes to do this with repos.
        # If there is, move it to our update folder.
        # If there isn't, move the temp directory to our update folder.
        # (BUGFIX: the old code referenced an undefined name "tmpcts" here,
        # and left the update folder uncreated when the single entry was
        # not a directory.)
        if len(contents) == 1 and os.path.isdir(os.path.join(tmpfldr, contents[0])):
            shutil.move(os.path.join(tmpfldr, contents[0]), updfldr)
        else:
            shutil.move(tmpfldr, updfldr)
        # Remove the temporary folder unless the move above consumed it.
        if os.path.exists(tmpfldr):
            os.rmdir(tmpfldr)
        # Remove the update file.
        os.remove(fn)
        # Delete working-tree copies of everything in the update folder.
        removeCopies(updfldr)
        # Copy the update's files into the working tree.
        copyUpdate(updfldr)

        # Finally, remove the update folder.
        shutil.rmtree(updfldr)
|
|
|
|
|
2011-08-16 03:36:04 -04:00
|
|
|
def updateCheck(q):
    """Legacy update-check worker; reports results on the queue *q*.

    Currently disabled: it immediately puts (False, 0) ("no update") on
    *q* and returns. Everything after the first return is unreachable
    legacy code, kept for reference.
    """
    # karxi: Disabled for now; causing issues.
    # There should be an alternative system in place soon.
    return q.put((False,0))

    # NOTE(review): dead code from here down — never executed.
    time.sleep(3)
    data = urllib.parse.urlencode({"type" : USER_TYPE, "os" : OS_TYPE, "install" : INSTALL_TYPE})
    try:
        f = urllib.request.urlopen("http://distantsphere.com/pesterchum.php?" + data)
    except:
        print("Update check Failure: 1"); return q.put((False,1))
    # NOTE(review): f.read() returns bytes on Python 3, so the "<" check
    # and the verStrToNum() call below would need a .decode() to work if
    # this path were ever revived.
    newest = f.read()
    f.close()
    if not newest or newest[0] == "<":
        print("Update check Failure: 2"); return q.put((False,2))
    try:
        (full, major, minor, status, revision, url) = verStrToNum(newest)
    except TypeError:
        # verStrToNum() returned None (parse failure), so unpacking raised.
        return q.put((False,3))
    print(full)
    print(repr(verStrToNum(newest)))

    # String (lexicographic) comparison of version components, inherited
    # from the original implementation.
    if major <= _pcMajor:
        if minor <= _pcMinor:
            if status:
                if status <= _pcStatus:
                    if revision <= _pcRevision:
                        return q.put((False,0))
            else:
                if not _pcStatus:
                    if revision <= _pcRevision:
                        return q.put((False,0))
    print("A new version of Pesterchum is avaliable!")
    q.put((full,url))
|
2011-08-16 03:36:04 -04:00
|
|
|
|
|
|
|
|
|
|
|
def removeCopies(path):
    """Recursively delete working-tree files that also exist under *path*.

    Paths under *path* begin with the 7-character "update/" prefix;
    stripping it yields the corresponding path relative to the current
    directory, which is removed if present.
    """
    for f in os.listdir(path):
        # BUGFIX: the old code called "osp", which was never imported and
        # raised NameError on first use; use os.path directly.
        filePath = os.path.join(path, f)
        # trunc is the leading "update/" prefix (7 chars, presumably —
        # TODO confirm callers always pass paths rooted at "update");
        # rem is the matching working-tree path.
        trunc, rem = filePath[:7], filePath[7:]
        if not os.path.isdir(filePath):
            if os.path.exists(rem):
                logger.debug(
                    "{0: <4}Deleting copy: {1!r} >{2!r}<".format(
                        '', trunc, rem)
                )
                os.remove(rem)
        else:
            # Recurse
            removeCopies(filePath)
|
|
|
|
|
|
|
|
def copyUpdate(path):
    """Recursively copy every file under *path* into the working tree.

    Mirrors removeCopies: the 7-character "update/" prefix is stripped to
    map each update file onto its working-tree destination, creating
    directories as needed.
    """
    for f in os.listdir(path):
        # BUGFIX: the old code called "osp", which was never imported and
        # raised NameError on first use; use os.path directly.
        filePath = os.path.join(path, f)
        trunc, rem = filePath[:7], filePath[7:]
        if not os.path.isdir(filePath):
            logger.debug(
                "{0: <4}Making copy: {1!r} ==> {2!r}".format(
                    '', filePath, rem)
            )
            # copy2 preserves metadata (mtime etc.) along with contents.
            shutil.copy2(filePath, rem)
        else:
            if not os.path.exists(rem):
                os.mkdir(rem)
            # Recurse
            copyUpdate(filePath)
|
|
|
|
|
|
|
|
def updateExtract(url, extension):
    """Download an update from *url* and unpack it over the working tree.

    extension is an explicit archive extension (".zip", ".tar.gz", ...)
    or None, in which case the downloaded file's type is sniffed.
    ".exe" downloads are fetched but not unpacked.
    """
    if extension:
        fn = "update" + extension
        urllib.request.urlretrieve(url, fn)
    else:
        fn = urllib.request.urlretrieve(url)[0]
        if tarfile and tarfile.is_tarfile(fn):
            extension = ".tar.gz"
        elif zipfile.is_zipfile(fn):
            extension = ".zip"
        else:
            try:
                from libs import magic # :O I'M IMPORTING /MAGIC/!! HOLY SHIT!
                mime = magic.from_file(fn, mime=True)
                if mime == 'application/octet-stream':
                    extension = ".exe"
            except Exception:
                # Sniffing failed; extension stays None and we fall
                # through to the guarded checks below.
                pass

    print(url, fn, extension)

    if extension == ".exe":
        # Nothing to unpack for installers.
        pass
    # BUGFIX: guard against extension being None — the old code called
    # None.startswith(".tar") here and raised AttributeError whenever the
    # file type couldn't be determined.
    elif extension and (extension == ".zip" or extension.startswith(".tar")):
        if extension == ".zip":
            is_updatefile, openupdate = zipfile.is_zipfile, zipfile.ZipFile
            print("Opening .zip")
        elif tarfile and extension.startswith(".tar"):
            is_updatefile, openupdate = tarfile.is_tarfile, tarfile.open
            print("Opening .tar")
        else:
            # .tar archive but the tarfile module is unavailable.
            return

        if is_updatefile(fn):
            update = openupdate(fn, 'r')
            if os.path.exists("tmp"):
                shutil.rmtree("tmp")
            os.mkdir("tmp")
            update.extractall("tmp")
            # Close the archive; the old code leaked the handle.
            update.close()
            tmp = os.listdir("tmp")
            if os.path.exists("update"):
                shutil.rmtree("update")
            # Git archives typically wrap everything in one top folder.
            if len(tmp) == 1 and \
               os.path.isdir("tmp/" + tmp[0]):
                shutil.move("tmp/" + tmp[0], "update")
                # BUGFIX: only rmdir "tmp" when it still exists; the old
                # unconditional rmdir failed after shutil.move("tmp", ...).
                os.rmdir("tmp")
            else:
                shutil.move("tmp", "update")
            os.remove(fn)
            removeCopies("update")
            copyUpdate("update")
            shutil.rmtree("update")
|
|
|
|
|
|
|
|
def updateDownload(url):
    """Dispatch *url* to updateExtract, inferring the archive type.

    URLs ending in a known archive extension are handed over with that
    extension; bare GitHub repository links are redirected to their
    tarball; anything else is passed through for type sniffing.
    """
    # Collect any known suffixes the URL ends with (at most one can match).
    matched = [ext for ext in (".exe", ".zip", ".tar.gz", ".tar.bz2")
               if url.endswith(ext)]
    for ext in matched:
        updateExtract(url, ext)
    if matched:
        return

    # No recognized suffix; special-case plain GitHub repo URLs.
    if url.startswith("https://github.com/") and url.count('/') == 4:
        updateExtract(url + "/tarball/master", None)
    else:
        updateExtract(url, None)
|