I try a small python script to test my SPARQL request. However, just the next simple code doesn't work.
from SPARQLWrapper import SPARQLWrapper, JSON
import rdflib
#connect to the sparql point
sparql = SPARQLWrapper("http://localhost:3030/sparql")
#SPARQL request
sparql.setQuery("""
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
PREFIX rme: <http://www.semanticweb.org/reminer/>
SELECT ?o
WHERE { ?s ?p ?o }
LIMIT 1
""")
sparql.setReturnFormat(JSON)
results = sparql.query().convert()
for result in results["results"]["bindings"]:
print(result["o"]["value"])
My code freezes a long time at the conversion step then gives me an URLError. And when I stop the script, see the next message:
HTTPError Traceback (most recent call last)
<ipython-input-6-2ab63307a418> in <module>()
18 """)
19 sparql.setReturnFormat(JSON)
---> 20 results = sparql.query().convert()
21
22 for result in results["results"]["bindings"]:
/Users/francocy/anaconda/lib/python3.4/site-packages/SPARQLWrapper/Wrapper.py in query(self)
533 @rtype: L{QueryResult} instance
534 """
--> 535 return QueryResult(self._query())
536
537 def queryAndConvert(self):
/Users/francocy/anaconda/lib/python3.4/site-packages/SPARQLWrapper/Wrapper.py in _query(self)
513 raise EndPointInternalError(e.read())
514 else:
--> 515 raise e
516
517 def query(self):
/Users/francocy/anaconda/lib/python3.4/site-packages/SPARQLWrapper/Wrapper.py in _query(self)
503
504 try:
--> 505 response = urlopener(request)
506 return response, self.returnFormat
507 except urllib.error.HTTPError as e:
/Users/francocy/anaconda/lib/python3.4/urllib/request.py in urlopen(url, data, timeout, cafile, capath, cadefault, context)
159 else:
160 opener = _opener
--> 161 return opener.open(url, data, timeout)
162
163 def install_opener(opener):
/Users/francocy/anaconda/lib/python3.4/urllib/request.py in open(self, fullurl, data, timeout)
467 for processor in self.process_response.get(protocol, []):
468 meth = getattr(processor, meth_name)
--> 469 response = meth(req, response)
470
471 return response
/Users/francocy/anaconda/lib/python3.4/urllib/request.py in http_response(self, request, response)
577 if not (200 <= code < 300):
578 response = self.parent.error(
--> 579 'http', request, response, code, msg, hdrs)
580
581 return response
/Users/francocy/anaconda/lib/python3.4/urllib/request.py in error(self, proto, *args)
505 if http_err:
506 args = (dict, 'default', 'http_error_default') + orig_args
--> 507 return self._call_chain(*args)
508
509 # XXX probably also want an abstract factory that knows when it makes
/Users/francocy/anaconda/lib/python3.4/urllib/request.py in _call_chain(self, chain, kind, meth_name, *args)
439 for handler in handlers:
440 func = getattr(handler, meth_name)
--> 441 result = func(*args)
442 if result is not None:
443 return result
/Users/francocy/anaconda/lib/python3.4/urllib/request.py in http_error_default(self, req, fp, code, msg, hdrs)
585 class HTTPDefaultErrorHandler(BaseHandler):
586 def http_error_default(self, req, fp, code, msg, hdrs):
--> 587 raise HTTPError(req.full_url, code, msg, hdrs, fp)
588
589 class HTTPRedirectHandler(BaseHandler):
HTTPError: HTTP Error 403: Forbidden
I've the same situation with both 2.7 and 3.4. EDIT: I changed my connection from Wifi to intranet. My script works well with DBpedia Sparql endpoint but I've a Http error when I request on my local server. It seems to be a problem of proxy or access to my local server.
Thanks in advance for your help.
If you attempt to execute some SPARQL request on your local fuseki server through a python script, you could be disturbed by some proxy problem. To resolve the problem you could use the auto-detect property of urllib.