Merge branch 'main' of gitlab.constantvzw.org:anais_berck/queerying-wikidata into main
commit 73b392789a
@ -0,0 +1,16 @@
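# Generates one random stanza in the manner of Alison Knowles &
# James Tenney's 1967 computer poem "A House of Dust", whose word
# pools these appear to be.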
from random import choice
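
# Word pools, one per line of the stanza.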
material = ['SAND', 'DUST', 'LEAVES', 'PAPER', 'TIN', 'ROOTS', 'BRICK', 'STONE', 'DISCARDED CLOTHING', 'GLASS', 'STEEL', 'PLASTIC', 'MUD', 'BROKEN DISHES', 'WOOD', 'STRAW', 'WEEDS']
location = ['IN A GREEN, MOSSY TERRAIN', 'IN AN OVERPOPULATED AREA', 'BY THE SEA', 'BY AN ABANDONED LAKE', 'IN A DESERTED FACTORY', 'IN DENSE WOODS', 'IN JAPAN', 'AMONG SMALL HILLS', 'IN SOUTHERN FRANCE', 'AMONG HIGH MOUNTAINS', 'ON AN ISLAND', 'IN A COLD, WINDY CLIMATE', 'IN A PLACE WITH BOTH HEAVY RAIN AND BRIGHT SUN', 'IN A DESERTED AIRPORT', 'IN A HOT CLIMATE', 'INSIDE A MOUNTAIN', 'ON THE SEA', 'IN MICHIGAN', 'IN HEAVY JUNGLE UNDERGROWTH', 'BY A RIVER', 'AMONG OTHER HOUSES', 'IN A DESERTED CHURCH', 'IN A METROPOLIS', 'UNDERWATER']
light_source = ['CANDLES', 'ALL AVAILABLE LIGHTING', 'ELECTRICITY', 'NATURAL LIGHT']
inhabitants = ['PEOPLE WHO SLEEP VERY LITTLE', 'VEGETARIANS', 'HORSES AND BIRDS', 'PEOPLE SPEAKING MANY LANGUAGES WEARING LITTLE OR NO CLOTHING', 'ALL RACES OF MEN REPRESENTED WEARING PREDOMINANTLY RED CLOTHING', 'CHILDREN AND OLD PEOPLE', 'VARIOUS BIRDS AND FISH', 'LOVERS', 'PEOPLE WHO ENJOY EATING TOGETHER', 'PEOPLE WHO EAT A GREAT DEAL', 'COLLECTORS OF ALL TYPES', 'FRIENDS AND ENEMIES', 'PEOPLE WHO SLEEP ALMOST ALL THE TIME', 'VERY TALL PEOPLE', 'AMERICAN INDIANS', 'LITTLE BOYS', 'PEOPLE FROM MANY WALKS OF LIFE', 'NEGROS WEARING ALL COLORS', 'FRIENDS', 'FRENCH AND GERMAN SPEAKING PEOPLE', 'FISHERMEN AND FAMILIES', 'PEOPLE WHO LOVE TO READ']
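
# Print one randomly assembled stanza.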
print('')
print('A HOUSE OF ' + choice(material))
print(' ' + choice(location))
print(' USING ' + choice(light_source))
print(' INHABITED BY ' + choice(inhabitants))
print('')
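# One possible stanza: "A HOUSE OF ROOTS / BY A RIVER /
# USING NATURAL LIGHT / INHABITED BY FISHERMEN AND FAMILIES".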
@ -0,0 +1,59 @@
# import rdflib
# g = rdflib.Graph()
# https://stackoverflow.com/questions/68824808/query-wikidata-rest-api-with-related-identifier

from urllib.request import urlopen, Request
from urllib.parse import urlencode
import json
import re
from time import sleep
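
# Fetch the full JSON record for one Wikidata entity, given its
# /wiki/Qxxx or /entity/Qxxx URL, via the Special:EntityData endpoint.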
def get_info(wdurl):
    m = re.search(r"^https?://www\.wikidata\.org/(wiki|entity)/(?P<q>Q\d+)$", wdurl)
    if m is not None:
        qid = m.groupdict()['q']
        # print("qid", qid)
        url = f"https://www.wikidata.org/wiki/Special:EntityData/{qid}.json?flavor=simple"
        return json.load(urlopen(url))['entities'][qid]
    else:
        print("unrecognized url")
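
# Send a SPARQL query to the Wikidata Query Service and return the
# raw result bindings.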
def query(q, format="json", endpoint="https://query.wikidata.org/sparql"):
    p = {}
    p['format'] = format
    p['query'] = q
    # Wikimedia asks for a descriptive User-Agent; the default urllib
    # one may be rejected, so send a placeholder identifier here.
    req = Request(endpoint + "?" + urlencode(p),
                  headers={"User-Agent": "queerying-wikidata/0.1"})
    f = urlopen(req)
    return json.load(f)['results']['bindings']
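
# P186 is the "made from material" property; the query walks from
# each item through its P186 statement node to the material value.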
q1 = """
|
||||||
|
PREFIX p: <http://www.wikidata.org/prop/>
|
||||||
|
PREFIX ps: <http://www.wikidata.org/prop/statement/>
|
||||||
|
|
||||||
|
SELECT DISTINCT ?item ?statement0 ?material WHERE
|
||||||
|
{
|
||||||
|
?item p:P186 ?statement0.
|
||||||
|
?statement0 ps:P186 ?material.
|
||||||
|
}
|
||||||
|
LIMIT 10
|
||||||
|
"""
print(q1)
print()
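
# Resolve each result's item and material URIs to full entity
# records and print their English labels.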
for result in query(q1):
    statement = result['statement0']['value']
    value, item = result['material']['value'], result['item']['value']
    value = get_info(value)
    item = get_info(item)
    # print("value", value)
    # print("item", item)
    try:
        # Entities without an English label raise KeyError here.
        material_label, item_label = value['labels']['en']['value'], item['labels']['en']['value']
        print(f"A {item_label} made of {material_label}. See {statement}")
        print()
        sleep(3)  # pause between lookups to be gentle on the API
    except KeyError:
        print("no English label?")