updates to leaflet and mediawiki for recentchanges

master
Michael Murtaugh 7 years ago
parent 768c6456cc
commit fc83652e3b

@@ -622,7 +622,7 @@ def make_gallery(args):
     items.append(item)
     # Ensure / Generate tiles per image
-    items.sort(key=lambda x: x['url'])
+    # items.sort(key=lambda x: x['url'])
     tiles = []
     for item in items:
         n = item['url']
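
Note: with the URL sort commented out, the gallery keeps the order items arrive in, which for the recentchanges-driven feed is chronological. A minimal sketch, assuming each item carries the 'date' key set in recentfiles() below, if an explicit newest-first order were wanted instead:

    # Hypothetical alternative: order by upload date, newest first,
    # rather than relying on the incoming feed order.
    items.sort(key=lambda x: x.get('date', ''), reverse=True)
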
@@ -637,11 +637,15 @@ def make_gallery(args):
         if 'date' in item:
             dt = parse8601(item['date'], "%d %b %Y")
             caption += u'<span class="date">{0}</span>'.format(dt)
-        if 'url' in item:
-            ext = os.path.splitext(urlparse.urlparse(item['url']).path)[1]
-            if ext:
-                ext = ext[1:].upper()
-            caption += u'<a class="url" href="{0}">{1}</a>'.format(item['url'], ext)
+        if 'src' in item:
+            caption += u'<a class="url" href="{0}">{1}</a>'.format(item['src'], "SRC")
+        elif 'url' in item:
+            # ext = os.path.splitext(urlparse.urlparse(item['url']).path)[1]
+            # if ext:
+            #     ext = ext[1:].upper()
+            caption += u'<a class="url" href="{0}">{1}</a>'.format(item['url'], "SRC")
         if 'text' or 'date' in item:
             caption += u'</p>';
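
Note: the context line "if 'text' or 'date' in item:" is always true in Python, because the non-empty string 'text' is truthy on its own, so the closing </p> is appended unconditionally. A minimal sketch of the membership test presumably intended:

    # Close the caption paragraph only when one was actually opened.
    if 'text' in item or 'date' in item:
        caption += u'</p>'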

@@ -35,7 +35,11 @@ def wget (url, path, blocksize=4*1000):
 def page_url (site, page):
     # print ("[page_url]", page.name, file=sys.stderr)
     base = os.path.split(site.site['base'])[0]
-    uret = os.path.join(base, urlquote(page.normalize_title(page.name)))
+    path = page.normalize_title(page.name)
+    if type(path) == unicode:
+        path = path.encode("utf-8")
+    path = urlquote(path)
+    uret = os.path.join(base, path)
     # assert type(uret) == str
     return uret
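
This rewrite works around a Python 2 pitfall: quoting a unicode string raises KeyError as soon as the title contains a non-ASCII character, so the title is encoded to UTF-8 bytes before quoting. A minimal sketch, assuming urlquote is urllib.quote:

    # -*- coding: utf-8 -*-
    import urllib
    title = u'Caf\xe9 page'                       # hypothetical page title
    # urllib.quote(title) would raise KeyError on u'\xe9';
    # encoding to UTF-8 bytes first quotes cleanly.
    quoted = urllib.quote(title.encode('utf-8'))  # -> 'Caf%C3%A9%20page'
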
@@ -377,12 +381,15 @@ def recentfiles (args):
         # imageinfo = filepage.imageinfo
         imageinfo = imageinfo_with_thumbnail(wiki, r['title'])
         if not imageinfo['mime'].startswith("image/"):
-            print ("Skipping non image ({0}) {1}".format(imageinfo['mime'], r['title']))
+            print (u"Skipping non image ({0}) {1}".format(imageinfo['mime'], r['title']).encode("utf-8"), file=sys.stderr)
             continue
+        if 'thumburl' not in imageinfo:
+            print (u"Skipping item with no thumburl {0}".format(r['title']).encode("utf-8"), file=sys.stderr)
+            continue
         # Deal with the edge case where items matching aiend are returned
         if last_date and r['timestamp'] == last_date:
-            print ("SKIPPING AIEND item", file=sys.stderr)
+            # print ("SKIPPING AIEND item", file=sys.stderr)
             break
         # Construct an item for output
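
imageinfo_with_thumbnail is defined outside this excerpt; the new guard matters because the MediaWiki API only returns thumburl when the file could actually be scaled. A hypothetical sketch of such a helper, assuming mwclient's Site.api and Python 2:

    def imageinfo_with_thumbnail (site, title, width=1024):
        # Ask prop=imageinfo for url/mime plus a scaled rendition;
        # iiurlwidth makes the response include thumburl when possible.
        resp = site.api('query', titles=title, prop='imageinfo',
                        iiprop='url|mime|timestamp', iiurlwidth=width)
        page = resp['query']['pages'].values()[0]  # single title queried
        return page['imageinfo'][0]
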
@@ -391,7 +398,8 @@ def recentfiles (args):
         for usagepage in filepage.imageusage():
             break # just grab the first usage page
         # url : local path to file
-        imageurl = imageinfo['url']
+        # imageurl = imageinfo['url']
+        imageurl = imageinfo['thumburl']
         localpath = imageurl.replace("https://pzwiki.wdka.nl/mw-mediadesign/images/", "wiki/")
         # wget image from wiki to local folder
         if not os.path.exists(localpath):
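
Switching from url to thumburl means the scaled thumbnail gets mirrored rather than the original upload; the replace call then maps the wiki's public images URL onto the local wiki/ folder. An illustrative mapping, with a hypothetical file name:

    imageurl = "https://pzwiki.wdka.nl/mw-mediadesign/images/thumb/a/ab/Foo.jpg/1024px-Foo.jpg"
    localpath = imageurl.replace("https://pzwiki.wdka.nl/mw-mediadesign/images/", "wiki/")
    # localpath == "wiki/thumb/a/ab/Foo.jpg/1024px-Foo.jpg"
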
@@ -405,19 +413,20 @@ def recentfiles (args):
         item = {}
         item['url'] = localpath
         item['date'] = r['timestamp']
+        item['src'] = page_url(wiki, filepage)
         userpage = wiki.pages.get('User:'+r['user'])
         if usagepage:
-            item['text'] = '<a href="{0}">{1}</a><br>Uploaded by <a href="{2}">{3}</a>'.format(
+            item['text'] = u'<a href="{0}">{1}</a><br>Uploaded by <a href="{2}">{3}</a>'.format(
                 page_url(wiki, usagepage),
                 usagepage.page_title,
                 page_url(wiki, userpage),
-                r['user'])
+                r['user']).encode("utf-8")
         else:
-            item['text'] = '<a href="{0}">{1}</a><br>Uploaded by <a href="{2}">{3}</a>'.format(
+            item['text'] = u'<a href="{0}">{1}</a><br>Uploaded by <a href="{2}">{3}</a>'.format(
                 page_url(wiki, filepage),
                 filepage.page_title,
                 page_url(wiki, userpage),
-                r['user'])
+                r['user']).encode("utf-8")
         # print (json.dumps(item))
         items_to_output.append(item)
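
The new 'src' key records the file's own wiki page URL, which is what the caption code in make_gallery above links as SRC, and 'text' is pre-encoded to UTF-8 bytes so the item serializes cleanly. An illustrative output item, all values hypothetical:

    item = {
        'url': 'wiki/thumb/a/ab/Foo.jpg/1024px-Foo.jpg',  # local thumbnail path
        'date': '2018-01-01T00:00:00Z',                   # r['timestamp']
        'src': 'https://pzwiki.wdka.nl/mediadesign/File:Foo.jpg',
        'text': '<a href="...">...</a><br>Uploaded by <a href="...">...</a>',
    }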
