Issue 128 #135

Closed · wants to merge 3 commits
51 changes: 29 additions & 22 deletions lib/pyld/documentloader/requests.py
@@ -10,40 +10,38 @@
.. moduleauthor:: Olaf Conradi <[email protected]>
"""
import string
import re
import urllib.parse as urllib_parse
from json import JSONDecodeError

from pyld.jsonld import (JsonLdError, parse_link_header, LINK_HEADER_REL)
from pyld.jsonld import (JsonLdError, parse_link_header, prepend_base, LINK_HEADER_REL)


def requests_document_loader(secure=False, **kwargs):
def requests_document_loader(secure=False, max_link_follows=2, **kwargs):
"""
Create a Requests document loader.

Can be used to setup extra Requests args such as verify, cert, timeout,
or others.

:param secure: require all requests to use HTTPS (default: False).
:param max_link_follows: Maximum number of alternate link follows allowed.
:param **kwargs: extra keyword args for Requests get() call.

:return: the RemoteDocument loader function.
"""
import requests

def loader(url, options={}):
def loader(url, options={}, link_follow_count=0):
"""
Retrieves JSON-LD at the given URL.

:param url: the URL to retrieve.

:return: the RemoteDocument.
"""
try:
# validate URL
pieces = urllib_parse.urlparse(url)
if (not all([pieces.scheme, pieces.netloc]) or
pieces.scheme not in ['http', 'https'] or
set(pieces.netloc) > set(
string.ascii_letters + string.digits + '-.:')):
pieces.scheme not in ['http', 'https'] or
set(pieces.netloc) > set(
string.ascii_letters + string.digits + '-.:')):
raise JsonLdError(
'URL could not be dereferenced; only "http" and "https" '
'URLs are supported.',
@@ -69,30 +67,39 @@ def loader(url, options={}):
'contentType': content_type,
'contextUrl': None,
'documentUrl': response.url,
'document': response.json()
'document': None
}
# Try loading the JSON if the content_type matches
# A failure here means the response body is not valid json
if re.match(r'^application\/(\w*\+)?json$', content_type):
doc['document'] = response.json()
# if content_type in headers['Accept']:
# doc['document'] = response.json()
link_header = response.headers.get('link')
if link_header:
linked_context = parse_link_header(link_header).get(
LINK_HEADER_REL)
# only 1 related link header permitted
if linked_context and content_type != 'application/ld+json':
if isinstance(linked_context, list):
raise JsonLdError(
'URL could not be dereferenced, '
'it has more than one '
'associated HTTP Link Header.',
'jsonld.LoadDocumentError',
{'url': url},
code='multiple context link headers')
doc['contextUrl'] = linked_context['target']
if isinstance(linked_context, list):
raise JsonLdError(
'URL could not be dereferenced, '
'it has more than one '
'associated HTTP Link Header.',
'jsonld.LoadDocumentError',
{'url': url},
code='multiple context link headers')
doc['contextUrl'] = linked_context['target']
linked_alternate = parse_link_header(link_header).get('alternate')
# if not JSON-LD, alternate may point there
if (linked_alternate and
linked_alternate.get('type') == 'application/ld+json' and
not re.match(r'^application\/(\w*\+)?json$', content_type)):
doc['contentType'] = 'application/ld+json'
doc['documentUrl'] = jsonld.prepend_base(url, linked_alternate['target'])
doc['documentUrl'] = prepend_base(url, linked_alternate['target'])
if link_follow_count >= max_link_follows:
raise requests.TooManyRedirects(f"Exceeded maximum link header redirects ({max_link_follows})")
return loader(doc['documentUrl'], options=options, link_follow_count=link_follow_count + 1)
return doc
except JsonLdError as e:
raise e
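For reference, a minimal sketch of what the new content-type gate in this patch does: the response body is only parsed as JSON when the media type matches the ^application\/(\w*\+)?json$ pattern from the diff above; anything else leaves doc['document'] as None so the alternate Link-header logic can take over. The regex is copied from the patch; the sample media types below are illustrative only.

import re

# The pattern the patch uses to decide whether a response body is JSON.
JSON_CONTENT_TYPE = re.compile(r'^application\/(\w*\+)?json$')

samples = (
    'application/json',          # plain JSON: parsed
    'application/ld+json',       # structured-syntax suffix: parsed
    'text/html',                 # not JSON: skipped, alternate link may be followed
    'application/octet-stream',  # not JSON: skipped
)

for content_type in samples:
    if JSON_CONTENT_TYPE.match(content_type):
        print(content_type, '-> parse body with response.json()')
    else:
        print(content_type, "-> leave doc['document'] as None")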
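And a hedged usage sketch, assuming the branch from this PR is installed: the patched loader is registered with jsonld.set_document_loader, and the new max_link_follows argument caps how many alternate Link-header hops the loader will chase before raising requests.TooManyRedirects. The URL and the extra Requests options are placeholders, not part of the patch.

from pyld import jsonld
from pyld.documentloader.requests import requests_document_loader

# Register the patched loader. max_link_follows is the parameter added in this
# PR; secure and timeout are ordinary loader/Requests options passed through
# to requests.get(). The URL below is a placeholder.
jsonld.set_document_loader(
    requests_document_loader(secure=True, max_link_follows=2, timeout=10))

expanded = jsonld.expand('https://example.org/resource')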