"""
LDP REST API endpoints for lakesuperior (``lakesuperior.endpoints.ldp``).
"""

import hashlib
import logging
import pdb

from base64 import b64encode
from collections import defaultdict
from io import BytesIO
from pprint import pformat
from uuid import uuid4

import arrow

from flask import (
        Blueprint, Response, g, make_response, render_template,
        request, send_file)
from rdflib import Graph, plugin, parser#, serializer
from werkzeug.http import parse_date

from lakesuperior import env
from lakesuperior import exceptions as exc
from lakesuperior.api import resource as rsrc_api
from lakesuperior.dictionaries.namespaces import ns_collection as nsc
from lakesuperior.dictionaries.namespaces import ns_mgr as nsm
from lakesuperior.model.ldp.ldp_factory import LdpFactory
from lakesuperior.model.ldp.ldp_nr import LdpNr
from lakesuperior.model.ldp.ldp_rs import LdpRs
from lakesuperior.model.ldp.ldpr import RES_CREATED, Ldpr
from lakesuperior.util import toolbox
from lakesuperior.util.toolbox import RequestUtils


DEFAULT_RDF_MIMETYPE = 'text/turtle'
"""
Fallback serialization format used when no acceptable formats are specified.
"""

logger = logging.getLogger(__name__)

rdf_parsable_mimetypes = {
    mt.name for mt in plugin.plugins()
    if mt.kind is parser.Parser and '/' in mt.name
}
"""MIMEtypes that can be parsed into RDF."""

store = env.app_globals.rdf_store

rdf_serializable_mimetypes = {
    #mt.name for mt in plugin.plugins()
    #if mt.kind is serializer.Serializer and '/' in mt.name
    'application/ld+json',
    'application/n-triples',
    'application/rdf+xml',
    'text/turtle',
    'text/n3',
}
"""
MIMEtypes that RDF can be serialized into.

These are not automatically derived from RDFLib because only triple
(not quad) serializations are applicable.
"""

accept_patch = (
    'application/sparql-update',
)
"""MIMEtypes accepted for PATCH (advertised via ``Accept-Patch``)."""

std_headers = {
    'Accept-Patch' : ','.join(accept_patch),
    'Accept-Post' : ','.join(rdf_parsable_mimetypes),
}
"""Baseline headers added to most responses."""

vw_blacklist = {
}
"""Predicates excluded by view."""

option_to_uri = {
    'embed_children': Ldpr.EMBED_CHILD_RES_URI,
    'incl_children': Ldpr.RETURN_CHILD_RES_URI,
    'incl_inbound': Ldpr.RETURN_INBOUND_REF_URI,
    'incl_srv_mgd': Ldpr.RETURN_SRV_MGD_RES_URI
}
"""``Prefer`` header representation options currently supported, by URI."""

ldp = Blueprint(
        'ldp', __name__, template_folder='templates',
        static_url_path='/static', static_folder='templates/static')
"""
Blueprint for LDP REST API. This is what is usually found under ``/rest/`` in
standard fcrepo4. Here, it is under ``/ldp`` but initially ``/rest`` will be
kept for backward compatibility.
"""

## ROUTE PRE- & POST-PROCESSING ##

@ldp.url_defaults
def bp_url_defaults(endpoint, values):
    """Propagate the current URL prefix, if any, into generated URLs."""
    prefix = getattr(g, 'url_prefix', None)
    if prefix is None:
        return
    values.setdefault('url_prefix', prefix)
@ldp.url_value_preprocessor
def bp_url_value_preprocessor(endpoint, values):
    """Store the URL prefix and web root, and normalize UID path values."""
    g.url_prefix = values.pop('url_prefix')
    g.webroot = request.host_url + g.url_prefix
    # Ensure every UID value carries exactly one leading slash.
    for key in ('uid', 'parent_uid'):
        if key in values:
            values[key] = '/' + values[key].lstrip('/')
@ldp.before_request
def log_request_start():
    """Log the beginning of each request at INFO level."""
    logger.info('** Start {} {} **'.format(request.method, request.url))
@ldp.before_request
def instantiate_req_vars():
    """Attach a fresh request-utility toolbox to the request context."""
    g.tbox = RequestUtils()
@ldp.after_request
def log_request_end(rsp):
    """Log the end of each request and pass the response through."""
    logger.info('** End {} {} **'.format(request.method, request.url))
    return rsp
## REST SERVICES ##
@ldp.route('/<path:uid>', methods=['GET'], strict_slashes=False)
@ldp.route('/', defaults={'uid': '/'}, methods=['GET'], strict_slashes=False)
@ldp.route('/<path:uid>/fcr:metadata', defaults={'out_fmt' : 'rdf'},
        methods=['GET'])
@ldp.route('/<path:uid>/fcr:content', defaults={'out_fmt' : 'non_rdf'},
        methods=['GET'])
def get_resource(uid, out_fmt=None):
    r"""
    https://www.w3.org/TR/ldp/#ldpr-HTTP_GET

    Retrieve RDF or binary content.

    :param str uid: UID of resource to retrieve. The repository root has an
        empty string for UID.
    :param str out_fmt: Force output to RDF or non-RDF if the resource is
        a LDP-NR. This is not available in the API but is used e.g. by the
        ``\*/fcr:metadata`` and ``\*/fcr:content`` endpoints. The default is
        False.
    """
    out_headers = std_headers.copy()
    repr_options = defaultdict(dict)
    # BUG FIX: previously only assigned in the `out_fmt is None` branch and
    # recovered later via a fragile `locals().get('rdf_mimetype')` lookup.
    # Initialize unconditionally instead.
    rdf_mimetype = None

    # First check if it's not a 404 or a 410.
    try:
        if not rsrc_api.exists(uid):
            return '', 404
    except exc.TombstoneError as e:
        return _tombstone_response(e, uid)

    # Then process the condition headers.
    cond_ret = _process_cond_headers(uid, request.headers)
    if cond_ret:
        return cond_ret

    # Then, business as usual.
    # Evaluate which representation is requested.
    if 'prefer' in request.headers:
        prefer = toolbox.parse_rfc7240(request.headers['prefer'])
        logger.debug('Parsed Prefer header: {}'.format(pformat(prefer)))
        if 'return' in prefer:
            repr_options = parse_repr_options(prefer['return'], out_headers)

    rsrc = rsrc_api.get(uid, repr_options)

    with store.txn_ctx():
        if out_fmt is None:
            rdf_mimetype = _best_rdf_mimetype()
            out_fmt = (
                    'rdf'
                    if isinstance(rsrc, LdpRs) or rdf_mimetype is not None
                    else 'non_rdf')
        out_headers.update(_headers_from_metadata(rsrc, out_fmt))
        uri = g.tbox.uid_to_uri(uid)

        # RDF output.
        if out_fmt == 'rdf':
            if rdf_mimetype is None:
                rdf_mimetype = DEFAULT_RDF_MIMETYPE
            ggr = g.tbox.globalize_imr(rsrc.out_graph)
            ggr.namespace_manager = nsm
            rsp = _negotiate_content(
                    ggr, rdf_mimetype, out_headers, uid=uid, uri=uri)
            if isinstance(rsrc, LdpNr):
                rsp.headers.add(
                        'Link', f'<{g.tbox.uid_to_uri(uid)}>',
                        rel='describes')
            return rsp

        # Datastream.
        if not getattr(rsrc, 'local_path', False):
            return ('{} has no binary content.'.format(rsrc.uid), 404)

        logger.debug('Streaming out binary content.')
        if request.range and request.range.units == 'bytes':
            # Stream partial response.
            # This is only true if the header is well-formed. Thanks,
            # Werkzeug.
            rsp = _parse_range_header(
                request.range.ranges, rsrc, out_headers
            )
        else:
            rsp = make_response(send_file(
                    rsrc.local_path, as_attachment=True,
                    attachment_filename=rsrc.filename,
                    mimetype=rsrc.mimetype), 200, out_headers)

        # This seems necessary to prevent Flask from setting an
        # additional ETag.
        if 'ETag' in out_headers:
            rsp.set_etag(out_headers['ETag'])
        rsp.headers.add('Link', f'<{uri}/fcr:metadata>; rel="describedby"')

        return rsp
@ldp.route('/<path:uid>/fcr:versions', methods=['GET'])
def get_version_info(uid):
    """
    Get version info (`fcr:versions`).

    :param str uid: UID of resource to retrieve versions for.
    """
    rdf_mimetype = _best_rdf_mimetype() or DEFAULT_RDF_MIMETYPE
    try:
        imr = rsrc_api.get_version_info(uid)
    except exc.ResourceNotExistsError as e:
        return str(e), 404
    except exc.InvalidResourceError as e:
        return str(e), 409
    except exc.TombstoneError as e:
        return _tombstone_response(e, uid)

    with store.txn_ctx():
        return _negotiate_content(g.tbox.globalize_imr(imr), rdf_mimetype)
@ldp.route('/<path:uid>/fcr:versions/<ver_uid>', methods=['GET'])
def get_version(uid, ver_uid):
    """
    Get an individual resource version.

    :param str uid: Resource UID.
    :param str ver_uid: Version UID.
    """
    rdf_mimetype = _best_rdf_mimetype() or DEFAULT_RDF_MIMETYPE
    try:
        imr = rsrc_api.get_version(uid, ver_uid)
    except exc.ResourceNotExistsError as e:
        return str(e), 404
    except exc.InvalidResourceError as e:
        return str(e), 409
    except exc.TombstoneError as e:
        return _tombstone_response(e, uid)

    with store.txn_ctx():
        return _negotiate_content(g.tbox.globalize_imr(imr), rdf_mimetype)
@ldp.route('/<path:parent_uid>', methods=['POST'], strict_slashes=False)
@ldp.route('/', defaults={'parent_uid': '/'}, methods=['POST'],
        strict_slashes=False)
def post_resource(parent_uid):
    """
    https://www.w3.org/TR/ldp/#ldpr-HTTP_POST

    Add a new resource in a new URI.

    :param str parent_uid: UID of the container the new resource is
        created in.
    """
    rsp_headers = std_headers.copy()
    slug = request.headers.get('Slug')

    try:
        kwargs = _create_args_from_req(slug)
        rsrc = rsrc_api.create(parent_uid, slug, **kwargs)
    except exc.RdfParsingError as e:
        return str(e), 400
    except exc.IndigestibleError:
        return (
            f'Unable to parse digest header: {request.headers["digest"]}'
        ), 400
    except exc.ResourceNotExistsError as e:
        return str(e), 404
    except (exc.InvalidResourceError, exc.ChecksumValidationError) as e:
        return str(e), 409
    except exc.TombstoneError as e:
        # BUG FIX: this handler referenced an undefined name `uid`; the UID
        # in scope in this view is `parent_uid`.
        return _tombstone_response(e, parent_uid)
    except exc.ServerManagedTermError as e:
        # BUG FIX: the original interpolated `uri`, which is only assigned
        # after this try block (NameError), and produced malformed link
        # syntax. Point the constrainedBy relation at the constraints
        # document instead.
        rsp_headers['Link'] = (
            f'<{g.webroot}/info/ldp_constraints>; '
            f'rel="{nsc["ldp"].constrainedBy}"')
        return str(e), 412

    uri = g.tbox.uid_to_uri(rsrc.uid)

    with store.txn_ctx():
        rsp_headers.update(_headers_from_metadata(rsrc))
    rsp_headers['Location'] = uri

    if kwargs.get('mimetype') and kwargs.get('rdf_fmt') is None:
        rsp_headers['Link'] = (
            f'<{uri}/fcr:metadata>; rel="describedby"; anchor="{uri}"')

    return uri, 201, rsp_headers
@ldp.route('/<path:uid>', methods=['PUT'], strict_slashes=False)
@ldp.route('/<path:uid>/fcr:metadata', defaults={'force_rdf' : True},
        methods=['PUT'])
def put_resource(uid, force_rdf=False):
    """
    https://www.w3.org/TR/ldp/#ldpr-HTTP_PUT

    Add or replace a new resource at a specified URI.

    :param str uid: UID of the resource to create or replace.
    :param bool force_rdf: Set to True by the ``fcr:metadata`` route
        default. BUG FIX: the original signature did not accept this
        argument, so PUT requests to ``*/fcr:metadata`` raised a
        ``TypeError`` when Flask passed the route default. It is accepted
        (and currently unused) here.
    """
    # Parse headers.
    logger.debug('Request headers: {}'.format(request.headers))

    cond_ret = _process_cond_headers(uid, request.headers, False)
    if cond_ret:
        return cond_ret

    try:
        kwargs = _create_args_from_req(uid)
        evt, rsrc = rsrc_api.create_or_replace(uid, **kwargs)
    except exc.RdfParsingError as e:
        return str(e), 400
    except exc.IndigestibleError:
        return (
            f'Unable to parse digest header: {request.headers["digest"]}',
            400)
    except (
            exc.InvalidResourceError, exc.ChecksumValidationError,
            exc.ResourceExistsError) as e:
        return str(e), 409
    except (exc.ServerManagedTermError, exc.SingleSubjectError) as e:
        return str(e), 412
    except exc.IncompatibleLdpTypeError as e:
        return str(e), 415
    except exc.TombstoneError as e:
        return _tombstone_response(e, uid)

    with store.txn_ctx():
        rsp_headers = _headers_from_metadata(rsrc)
    rsp_headers['Content-Type'] = 'text/plain; charset=utf-8'

    uri = g.tbox.uid_to_uri(uid)
    if evt == RES_CREATED:
        rsp_code = 201
        rsp_headers['Location'] = rsp_body = uri
        if kwargs.get('mimetype') and not kwargs.get('rdf_data'):
            rsp_headers['Link'] = f'<{uri}/fcr:metadata>; rel="describedby"'
    else:
        rsp_code = 204
        rsp_body = ''

    return rsp_body, rsp_code, rsp_headers
@ldp.route('/<path:uid>', methods=['PATCH'], strict_slashes=False)
@ldp.route('/', defaults={'uid': '/'}, methods=['PATCH'],
        strict_slashes=False)
def patch_resource(uid, is_metadata=False):
    """
    https://www.w3.org/TR/ldp/#ldpr-HTTP_PATCH

    Update an existing resource with a SPARQL-UPDATE payload.
    """
    # First check if it's not a 404 or a 410.
    try:
        if not rsrc_api.exists(uid):
            return '', 404
    except exc.TombstoneError as e:
        return _tombstone_response(e, uid)

    # Then process the condition headers.
    cond_ret = _process_cond_headers(uid, request.headers, False)
    if cond_ret:
        return cond_ret

    handling, _ = _set_post_put_params()
    rsp_headers = {'Content-Type': 'text/plain; charset=utf-8'}

    # Only SPARQL Update payloads are accepted.
    if request.mimetype != 'application/sparql-update':
        return 'Provided content type is not a valid parsable format: {}'\
                .format(request.mimetype), 415

    update_str = request.get_data().decode('utf-8')
    local_update_str = g.tbox.localize_ext_str(update_str, nsc['fcres'][uid])
    try:
        rsrc = rsrc_api.update(uid, local_update_str, is_metadata, handling)
    except (exc.ServerManagedTermError, exc.SingleSubjectError) as e:
        return str(e), 412
    except exc.InvalidResourceError as e:
        return str(e), 415

    with store.txn_ctx():
        rsp_headers.update(_headers_from_metadata(rsrc))

    return '', 204, rsp_headers
@ldp.route('/<path:uid>/fcr:metadata', methods=['PATCH'])
def patch_resource_metadata(uid):
    """Apply a SPARQL-UPDATE payload to an LDP-NR's metadata."""
    return patch_resource(uid, True)
@ldp.route('/<path:uid>', methods=['DELETE'])
def delete_resource(uid):
    """
    Delete a resource and optionally leave a tombstone.

    This behaves differently from FCREPO. A tombstone indicated that the
    resource is no longer available at its current location, but its
    historic snapshots still are. Also, deleting a resource with a
    tombstone creates one more version snapshot of the resource prior to
    being deleted.

    In order to completely wipe out all traces of a resource, the tombstone
    must be deleted as well, or the ``Prefer:no-tombstone`` header can be
    used. The latter will forget (completely delete) the resource
    immediately.
    """
    # First check if it's not a 404 or a 410.
    try:
        if not rsrc_api.exists(uid):
            return '', 404
    except exc.TombstoneError as e:
        return _tombstone_response(e, uid)

    # Then process the condition headers.
    cond_ret = _process_cond_headers(uid, request.headers, False)
    if cond_ret:
        return cond_ret

    headers = std_headers.copy()

    # `Prefer: no-tombstone` forgets the resource outright.
    leave_tstone = True
    if 'prefer' in request.headers:
        prefer = toolbox.parse_rfc7240(request.headers['prefer'])
        leave_tstone = 'no-tombstone' not in prefer

    rsrc_api.delete(uid, leave_tstone)

    return '', 204, headers
@ldp.route('/<path:uid>/fcr:tombstone', methods=['GET', 'POST', 'PUT',
        'PATCH', 'DELETE'])
def tombstone(uid):
    """
    Handle all tombstone operations.

    The only allowed methods are POST and DELETE; any other verb will
    return a 405.
    """
    try:
        rsrc_api.get(uid)
    except exc.TombstoneError as e:
        if request.method == 'DELETE':
            if e.uid != uid:
                return _tombstone_response(e, uid)
            # Delete the tombstone itself, without leaving another one.
            rsrc_api.delete(uid, False)
            return '', 204
        if request.method == 'POST':
            if e.uid != uid:
                return _tombstone_response(e, uid)
            # Bring the resource back to life.
            rsrc_uri = rsrc_api.resurrect(uid)
            return rsrc_uri, 201, {'Location': rsrc_uri}
        return 'Method Not Allowed.', 405
    except exc.ResourceNotExistsError as e:
        return str(e), 404

    # A live resource has no tombstone to operate on.
    return '', 404
@ldp.route('/<path:uid>/fcr:versions', methods=['POST', 'PUT'])
def post_version(uid):
    """
    Create a new resource version.
    """
    if request.method == 'PUT':
        return 'Method not allowed.', 405

    ver_uid = request.headers.get('slug', None)
    try:
        ver_uid = rsrc_api.create_version(uid, ver_uid)
    except exc.ResourceNotExistsError as e:
        return str(e), 404
    except exc.InvalidResourceError as e:
        return str(e), 409
    except exc.TombstoneError as e:
        return _tombstone_response(e, uid)

    return '', 201, {'Location': g.tbox.uid_to_uri(ver_uid)}
@ldp.route('/<path:uid>/fcr:versions/<ver_uid>', methods=['PATCH'])
def patch_version(uid, ver_uid):
    """
    Revert to a previous version.

    NOTE: This creates a new version snapshot.

    :param str uid: Resource UID.
    :param str ver_uid: Version UID.
    """
    try:
        rsrc_api.revert_to_version(uid, ver_uid)
    except exc.ResourceNotExistsError as e:
        return str(e), 404
    except exc.InvalidResourceError as e:
        return str(e), 409
    except exc.TombstoneError as e:
        return _tombstone_response(e, uid)

    return '', 204
## PRIVATE METHODS ##

def _best_rdf_mimetype():
    """
    Return the first RDF-parsable MIMEtype in the ``Accept`` header, if any.
    """
    return next(
            (accept[0] for accept in request.accept_mimetypes
                if accept[0] in rdf_parsable_mimetypes),
            None)


def _negotiate_content(gr, rdf_mimetype, headers=None, **vw_kwargs):
    """
    Return HTML or serialized RDF depending on accept headers.
    """
    if request.accept_mimetypes.best == 'text/html':
        body = render_template(
                'resource.html', gr=gr, nsc=nsc, nsm=nsm,
                blacklist=vw_blacklist, arrow=arrow, **vw_kwargs)
        out_mimetype = 'text/html'
    else:
        # Strip blacklisted predicates before serializing.
        for pred in vw_blacklist:
            gr.remove((None, pred, None))
        body = gr.serialize(format=rdf_mimetype)
        out_mimetype = rdf_mimetype

    return Response(body, 200, headers, mimetype=out_mimetype)


def _create_args_from_req(uid):
    """
    Set API creation method arguments from request parameters.

    The ``kwargs`` variable returned has two keys: either ``rdf_data`` and
    ``rdf_fmt`` for LDP-RS or ``stream`` and ``mimetype`` for LDP-NR.

    :rtype: dict
    """
    handling, disposition = _set_post_put_params()
    kwargs = {'handling': handling}
    if disposition:
        kwargs['disposition'] = disposition

    link_hdr = request.headers.get('Link')
    # An explicit `ldp:NonRDFSource` type link forces an LDP-NR.
    force_ldpnr = bool(
            link_hdr
            and nsc['ldp']['NonRDFSource'] in link_hdr
            and 'rel="type"' in link_hdr)

    if request.mimetype == 'multipart/form-data':
        # This seems the "right" way to upload a binary file, with a
        # multipart/form-data MIME type and the file in the `file`
        # field. This however is not supported by FCREPO4.
        stream = request.files.get('file').stream
        mimetype = request.files.get('file').content_type
        # @TODO This will turn out useful to provide metadata
        # with the binary.
        #metadata = request.files.get('metadata').stream
    else:
        # This is a less clean way, with the file in the form body and
        # the request as application/x-www-form-urlencoded.
        # This is how FCREPO4 accepts binary uploads.
        stream = request.stream
        # @FIXME Must decide what to do with this.
        mimetype = request.mimetype
        if mimetype == 'application/x-www-form-urlencoded':
            mimetype = None

    if mimetype in rdf_parsable_mimetypes and not force_ldpnr:
        # If the content is RDF, localize in-repo URIs.
        global_rdf = stream.read()
        kwargs['rdf_data'] = g.tbox.localize_payload(global_rdf)
        kwargs['rdf_fmt'] = mimetype
    else:
        # Unspecified mimetype or force_ldpnr creates a LDP-NR.
        kwargs['stream'] = stream or BytesIO(b'')
        kwargs['mimetype'] = mimetype or 'application/octet-stream'

    # Check digest if requested.
    if 'digest' in request.headers:
        try:
            kwargs['prov_cksum_algo'], kwargs['prov_cksum'] = (
                    request.headers['digest'].split('='))
        except ValueError:
            raise exc.IndigestibleError(uid)

    return kwargs


def _tombstone_response(e, uid):
    """Build a 410 response, with a tombstone link if the UIDs match."""
    headers = {}
    if e.uid == uid:
        headers['Link'] = (
                '<{}/fcr:tombstone>; rel="hasTombstone"'.format(request.url))
    return str(e), 410, headers


def _set_post_put_params():
    """
    Sets handling and content disposition for POST and PUT by parsing
    headers.
    """
    handling = 'strict'
    if 'prefer' in request.headers:
        prefer = toolbox.parse_rfc7240(request.headers['prefer'])
        logger.debug('Parsed Prefer header: {}'.format(prefer))
        if 'handling' in prefer:
            handling = prefer['handling']['value']

    try:
        disposition = toolbox.parse_rfc7240(
            request.headers['content-disposition'])
    except KeyError:
        disposition = None

    return handling, disposition
def parse_repr_options(repr_options, out_headers):
    """
    Set options to retrieve IMR.

    Ideally, IMR retrieval is done once per request, so all the options are
    set once in the `imr()` property.

    Representation options include:

    - ``embed_children``: include full resource representation of all
      resource children in the resource graph.
    - ``incl_children``: TODO
    - ``incl_inbound``: include inbound triples (triples whose object is
      this resource).
    - ``incl_srv_mgd``: include server-managed triples.

    All options above are ``False`` by default except for ``incl_srv_mgd``
    which is only ``False`` if the ``return`` representation is ``minimal``.

    :param dict repr_options:: Options parsed from `Prefer` header.
    :param dict out_headers:: Response headers.
    """
    logger.debug('Parsing retrieval options: {}'.format(repr_options))

    if repr_options.get('value') == 'minimal':
        imr_options = {
            'embed_children': False,
            'incl_children': False,
            'incl_inbound': False,
            'incl_srv_mgd': False,
        }
        out_headers['Preference-Applied'] = 'return="minimal"'
    else:
        # Default.
        imr_options = {
            'embed_children': False,
            'incl_children': True,
            'incl_inbound': False,
            'incl_srv_mgd': True,
        }
        # Override defaults.
        if 'parameters' in repr_options:
            try:
                requested = _valid_preferences(repr_options)
                include, omit = [], []
                for opt, enabled in requested.items():
                    # `requested` only contains stated preferences;
                    # override the defaults for those.
                    imr_options[opt] = enabled
                    # Collect URIs for the Preference-Applied header.
                    bucket = include if enabled else omit
                    bucket.append(str(option_to_uri[opt]))

                applied = ''
                if include:
                    applied += ' include="' + ' '.join(include) + '";'
                if omit:
                    applied += ' omit="' + ' '.join(omit) + '";'
                if applied:
                    out_headers['Preference-Applied'] = (
                            'return=representation;' + applied)
            except KeyError:
                # Invalid Prefer header so we disregard the entire thing.
                pass

    logger.debug('Retrieval options: {}'.format(pformat(imr_options)))

    return imr_options
def _preference_decision(include, omit, header): """ Determine whether a header is in include or omit but not both. :param include:: list of include preference uris :param omit:: list of omit preference uris :param header:: the uri to look for :return: True if in include only or false if in omit only. """ if str(header) in include or str(header) in omit: if str(header) in include and str(header) in omit: # You can't include and omit, so ignore it. raise KeyError('Can\'t include and omit same preference') else: return str(header) in include return None def _valid_preferences(repr_options): """ Parse the Preference header to determine which we are applying. Re-used for response Preference-Applied header. :param repr_options: The incoming Preference header. :return: list of options being applied. """ imr_options = dict() include = repr_options['parameters']['include'].split(' ') \ if 'include' in repr_options['parameters'] else [] omit = repr_options['parameters']['omit'].split(' ') \ if 'omit' in repr_options['parameters'] else [] logger.debug('Include: {}'.format(include)) logger.debug('Omit: {}'.format(omit)) distinct_representations = include.copy() distinct_representations.extend(omit) distinct_representations = set(distinct_representations) uri_to_option = {str(v): k for k, v in option_to_uri.items()} for uri in distinct_representations: # Throws KeyError if we don't support the header option = uri_to_option[uri] imr_options[option] = _preference_decision(include, omit, uri) return imr_options def _headers_from_metadata(rsrc, out_fmt='text/turtle'): """ Create a dict of headers from a metadata graph. :param lakesuperior.model.ldp.ldpr.Ldpr rsrc: Resource to extract metadata from. """ rsp_headers = defaultdict(list) digest_p = rsrc.metadata.value(nsc['premis'].hasMessageDigest) # Only add ETag and digest if output is not RDF. 
if digest_p: rsp_headers['ETag'], rsp_headers['Digest'] = ( _digest_headers(digest_p)) last_updated_term = rsrc.metadata.value(nsc['fcrepo'].lastModified) if last_updated_term: rsp_headers['Last-Modified'] = arrow.get(last_updated_term)\ .format('ddd, D MMM YYYY HH:mm:ss Z') for t in rsrc.ldp_types: rsp_headers['Link'].append('{};rel="type"'.format(t.n3())) if rsrc.mimetype: rsp_headers['Content-Type'] = rsrc.mimetype return rsp_headers def _digest_headers(digest): """ Format ETag and Digest headers from resource checksum. :param str digest: Resource digest. For an extracted IMR, this is the value of the ``premis:hasMessageDigest`` property. """ digest_components = digest.split(':') cksum_hex = digest_components[-1] cksum = bytearray.fromhex(cksum_hex) digest_algo = digest_components[-2] etag_str = cksum_hex digest_str = '{}={}'.format( digest_algo.upper(), b64encode(cksum).decode('ascii')) return etag_str, digest_str def _condition_hdr_match(uid, headers, safe=True): """ Conditional header evaluation for HEAD, GET, PUT and DELETE requests. Determine whether any conditional headers, and which, is/are imposed in the request (``If-Match``, ``If-None-Match``, ``If-Modified-Since``, ``If-Unmodified-Since``, or none) and what the most relevant condition evaluates to (``True`` or ``False``). `RFC 7232 <https://tools.ietf.org/html/rfc7232#section-3.1>`__ does not indicate an exact condition precedence, except that the ETag matching conditions void the timestamp-based ones. This function adopts the following precedence: - ``If-Match`` is evaluated first if present; - Else, ``If-None-Match`` is evaluated if present; - Else, ``If-Modified-Since`` and ``If-Unmodified-Since`` are evaluated if present. If both conditions are present they are both returned so they can be furher evaluated, e.g. using a logical AND to allow time-range conditions, where the two terms indicate the early and late boundary, respectively. 
Note that the above mentioned RFC mentions several cases in which these conditions are ignored, e.g. for a 404 in some cases, or for certain HTTP methods for ``If-Modified-Since``. This must be implemented by the calling function. :param str uid: UID of the resource requested. :param werkzeug.datastructures.EnvironHeaders headers: Incoming request headers. :param bool safe: Whether a "safe" method is being processed. Defaults to True. :rtype: dict (str, bool) :return: Dictionary whose keys are the conditional header names that have been evaluated, and whose boolean values indicate whether each condition is met. If no valid conditional header is found, an empty dict is returned. """ # ETag-based conditions. # This ignores headers with empty values. if headers.get('if-match') or headers.get('if-none-match'): cond_hdr = 'if-match' if headers.get('if-match') else 'if-none-match' # Wildcard matching for unsafe methods. Cannot be part of a list of # ETags nor be enclosed in quotes. if not safe and headers.get(cond_hdr) == '*': return {cond_hdr: (cond_hdr == 'if-match') == rsrc_api.exists(uid)} req_etags = [ et.strip('\'" ') for et in headers.get(cond_hdr).split(',')] with store.txn_ctx(): try: rsrc_meta = rsrc_api.get_metadata(uid) except exc.ResourceNotExistsError: rsrc_meta = Graph(uri=nsc['fcres'][uid]) digest_prop = rsrc_meta.value(nsc['premis'].hasMessageDigest) if digest_prop: etag, _ = _digest_headers(digest_prop) if cond_hdr == 'if-match': is_match = etag in req_etags else: is_match = etag not in req_etags else: is_match = cond_hdr == 'if-none-match' return {cond_hdr: is_match} # Timestmp-based conditions. 
ret = {} if headers.get('if-modified-since') or headers.get('if-unmodified-since'): try: rsrc_meta = rsrc_api.get_metadata(uid) except exc.ResourceNotExistsError: return { 'if-modified-since': False, 'if-unmodified-since': False } with store.txn_ctx(): lastmod_str = rsrc_meta.value(nsc['fcrepo'].lastModified) lastmod_ts = arrow.get(lastmod_str) # If date is not in a RFC 5322 format # (https://tools.ietf.org/html/rfc5322#section-3.3) parse_date # evaluates to None. mod_since_date = parse_date(headers.get('if-modified-since')) if mod_since_date: cond_hdr = 'if-modified-since' ret[cond_hdr] = lastmod_ts > arrow.get(mod_since_date) unmod_since_date = parse_date(headers.get('if-unmodified-since')) if unmod_since_date: cond_hdr = 'if-unmodified-since' ret[cond_hdr] = lastmod_ts < arrow.get(unmod_since_date) return ret def _process_cond_headers(uid, headers, safe=True): """ Process the outcome of the evaluation of conditional headers. This yields different response between safe methods (``HEAD``, ``GET``, etc.) and unsafe ones (``PUT``, ``DELETE``, etc. :param str uid: Resource UID. :param werkzeug.datastructures.EnvironHeaders headers: Incoming request headers. :param bool safe: Whether a "safe" method is being processed. Defaults to True. """ try: cond_match = _condition_hdr_match(uid, headers, safe) except exc.TombstoneError as e: return _tombstone_response(e, uid) if cond_match: if safe: if 'if-match' in cond_match or 'if-none-match' in cond_match: # If an expected list of tags is not matched, the response is # "Precondition Failed". For all other cases, it's "Not Modified". if not cond_match.get('if-match', True): return '', 412 if not cond_match.get('if-none-match', True): return '', 304 # The presence of an Etag-based condition, whether satisfied or not, # voids the timestamp-based conditions. 
elif ( not cond_match.get('if-modified-since', True) or not cond_match.get('if-unmodified-since', True)): return '', 304 else: # Note that If-Modified-Since is only evaluated for safe methods. if 'if-match' in cond_match or 'if-none-match' in cond_match: if ( not cond_match.get('if-match', True) or not cond_match.get('if-none-match', True)): return '', 412 # The presence of an Etag-based condition, whether satisfied or not, # voids the timestamp-based conditions. elif not cond_match.get('if-unmodified-since', True): return '', 412 def _parse_range_header(ranges, rsrc, headers): """ Parse a ``Range`` header and return the appropriate response. """ if len(ranges) == 1: # Single range. rng = ranges[0] logger.debug('Streaming contiguous partial content.') with open(rsrc.local_path, 'rb') as fh: size = None if rng[1] is None else rng[1] - rng[0] hdr_endbyte = ( rsrc.content_size - 1 if rng[1] is None else rng[1] - 1) fh.seek(rng[0]) out = fh.read(size) headers['Content-Range'] = \ f'bytes {rng[0]}-{hdr_endbyte} / {rsrc.content_size}' else: return make_response('Multiple ranges are not yet supported.', 501) # TODO Format the response as multipart/byteranges: # https://tools.ietf.org/html/rfc7233#section-4.1 #out = [] #with open(rsrc.local_path, 'rb') as fh: # for rng in rng_header.ranges: # fh.seek(rng[0]) # size = None if rng[1] is None else rng[1] - rng[0] # out.extend(fh.read(size)) return make_response(out, 206, headers)