Dataset columns:
    repo: string (length 7 to 55)
    path: string (length 4 to 127)
    func_name: string (length 1 to 88)
    original_string: string (length 75 to 19.8k)
    language: string (1 distinct value)
    code: string (length 75 to 19.8k)
    code_tokens: list
    docstring: string (length 3 to 17.3k)
    docstring_tokens: list
    sha: string (length 40)
    url: string (length 87 to 242)
    partition: string (1 distinct value)
nitmir/django-cas-server
cas_server/views.py
FederateAuth.get_cas_client
def get_cas_client(self, request, provider, renew=False):
    """
    return a CAS client object matching provider

    :param django.http.HttpRequest request: The current request object
    :param cas_server.models.FederatedIendityProvider provider: the user identity provider
    :return: The user CAS client object
    :rtype: :class:`federate.CASFederateValidateUser <cas_server.federate.CASFederateValidateUser>`
    """
    # compute the current url, ignoring ticket and provider GET parameters
    service_url = utils.get_current_url(request, {"ticket", "provider"})
    self.service_url = service_url
    return CASFederateValidateUser(provider, service_url, renew=renew)
python
return a CAS client object matching provider :param django.http.HttpRequest request: The current request object :param cas_server.models.FederatedIendityProvider provider: the user identity provider :return: The user CAS client object :rtype: :class:`federate.CASFederateValidateUser <cas_server.federate.CASFederateValidateUser>`
[ "return", "a", "CAS", "client", "object", "matching", "provider" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L244-L257
train
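get_cas_client relies on utils.get_current_url to rebuild the URL of the current request while dropping the ticket and provider GET parameters, so the recomputed service URL is the same with or without those transient parameters. That helper is not part of this record; the sketch below only illustrates the kind of URL recomputation involved, using plain Django and standard-library calls, and current_url_without is a hypothetical name.

from urllib.parse import urlencode, urlsplit, urlunsplit

def current_url_without(request, ignore=frozenset()):
    """Rebuild the requested URL, dropping the GET parameters listed in `ignore`."""
    # request.GET is a QueryDict; keep every value of every parameter not in `ignore`
    params = [
        (key, value)
        for key in request.GET
        if key not in ignore
        for value in request.GET.getlist(key)
    ]
    scheme, netloc, path, _, _ = urlsplit(request.build_absolute_uri())
    return urlunsplit((scheme, netloc, path, urlencode(params), ""))

# A request for https://cas.example.org/federate/idp1?ticket=ST-123&next=%2Fapp would,
# with ignore={"ticket", "provider"}, come back as
# https://cas.example.org/federate/idp1?next=%2Fapp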
nitmir/django-cas-server
cas_server/views.py
FederateAuth.post
def post(self, request, provider=None):
    """
    method called on POST request

    :param django.http.HttpRequest request: The current request object
    :param unicode provider: Optional parameter. The user provider suffix.
    """
    # if settings.CAS_FEDERATE is not True redirect to the login page
    if not settings.CAS_FEDERATE:
        logger.warning("CAS_FEDERATE is False, set it to True to use federation")
        return redirect("cas_server:login")
    # POST with a provider suffix, this is probably an SLO request. csrf is disabled for
    # allowing SLO requests reception
    try:
        provider = FederatedIendityProvider.objects.get(suffix=provider)
        auth = self.get_cas_client(request, provider)
        try:
            auth.clean_sessions(request.POST['logoutRequest'])
        except (KeyError, AttributeError):
            pass
        return HttpResponse("ok")
    # else, a User is trying to log in using an identity provider
    except FederatedIendityProvider.DoesNotExist:
        # Manually checking for csrf to protect the code below
        reason = CsrfViewMiddleware().process_view(request, None, (), {})
        if reason is not None:  # pragma: no cover (csrf checks are disabled during tests)
            return reason  # Failed the test, stop here.
        form = forms.FederateSelect(request.POST)
        if form.is_valid():
            params = utils.copy_params(
                request.POST,
                ignore={"provider", "csrfmiddlewaretoken", "ticket", "lt"}
            )
            if params.get("renew") == "False":
                del params["renew"]
            url = utils.reverse_params(
                "cas_server:federateAuth",
                kwargs=dict(provider=form.cleaned_data["provider"].suffix),
                params=params
            )
            return HttpResponseRedirect(url)
        else:
            return redirect("cas_server:login")
python
method called on POST request :param django.http.HttpRequest request: The current request object :param unicode provider: Optional parameter. The user provider suffix.
[ "method", "called", "on", "POST", "request" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L259-L301
train
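In post, a POST carrying a known provider suffix is handled as a single-logout (SLO) notification from that provider: the logoutRequest field of the body is a small XML document whose SessionIndex element names the ticket whose local sessions must be closed. The actual cleanup is done by auth.clean_sessions (defined in cas_server.federate, not shown in this record); the snippet below is only a rough sketch of parsing such a payload, and the element layout is an assumption based on the usual CAS SLO message format.

import xml.etree.ElementTree as ElementTree

def session_indexes(logout_request):
    """Extract every SessionIndex value from a CAS-style logoutRequest XML payload."""
    root = ElementTree.fromstring(logout_request)
    # match SessionIndex elements whatever XML namespace prefix the provider used
    return [
        element.text.strip()
        for element in root.iter()
        if element.tag.endswith("SessionIndex") and element.text
    ]

# session_indexes(
#     '<samlp:LogoutRequest xmlns:samlp="urn:oasis:names:tc:SAML:2.0:protocol">'
#     '<samlp:SessionIndex>ST-42-abc</samlp:SessionIndex>'
#     '</samlp:LogoutRequest>'
# ) returns ['ST-42-abc']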
nitmir/django-cas-server
cas_server/views.py
FederateAuth.get
def get(self, request, provider=None):
    """
    method called on GET request

    :param django.http.HttpRequest request: The current request object
    :param unicode provider: Optional parameter. The user provider suffix.
    """
    # if settings.CAS_FEDERATE is not True redirect to the login page
    if not settings.CAS_FEDERATE:
        logger.warning("CAS_FEDERATE is False, set it to True to use federation")
        return redirect("cas_server:login")
    renew = bool(request.GET.get('renew') and request.GET['renew'] != "False")
    # If the user is already authenticated, no need to request authentication from the user
    # identity provider.
    if self.request.session.get("authenticated") and not renew:
        logger.warning("User already authenticated, dropping federated authentication request")
        return redirect("cas_server:login")
    try:
        # get the identity provider from its suffix
        provider = FederatedIendityProvider.objects.get(suffix=provider)
        # get a CAS client for the user identity provider
        auth = self.get_cas_client(request, provider, renew)
        # if no ticket is submitted, redirect to the identity provider CAS login page
        if 'ticket' not in request.GET:
            logger.info("Trying to authenticate %s again" % auth.provider.server_url)
            return HttpResponseRedirect(auth.get_login_url())
        else:
            ticket = request.GET['ticket']
            try:
                # if the ticket validation succeeds
                if auth.verify_ticket(ticket):
                    logger.info(
                        "Got a valid ticket for %s from %s" % (
                            auth.username,
                            auth.provider.server_url
                        )
                    )
                    params = utils.copy_params(request.GET, ignore={"ticket", "remember"})
                    request.session["federate_username"] = auth.federated_username
                    request.session["federate_ticket"] = ticket
                    auth.register_slo(
                        auth.federated_username,
                        request.session.session_key,
                        ticket
                    )
                    # redirect to the login page for the user to become authenticated
                    # thanks to the `federate_username` and `federate_ticket` session parameters
                    url = utils.reverse_params("cas_server:login", params)
                    response = HttpResponseRedirect(url)
                    # If the user has checked "remember my identity provider" store it in a
                    # cookie
                    if request.GET.get("remember"):
                        max_age = settings.CAS_FEDERATE_REMEMBER_TIMEOUT
                        utils.set_cookie(
                            response,
                            "remember_provider",
                            provider.suffix,
                            max_age
                        )
                    return response
                # else redirect to the identity provider CAS login page
                else:
                    logger.info(
                        (
                            "Got an invalid ticket %s from %s for service %s. "
                            "Retrying authentication"
                        ) % (
                            ticket,
                            auth.provider.server_url,
                            self.service_url
                        )
                    )
                    return HttpResponseRedirect(auth.get_login_url())
            # both xml.etree.ElementTree and lxml.etree exceptions inherit from SyntaxError
            except SyntaxError as error:
                messages.add_message(
                    request,
                    messages.ERROR,
                    _(
                        u"Invalid response from your identity provider CAS upon "
                        u"ticket %(ticket)s validation: %(error)r"
                    ) % {'ticket': ticket, 'error': error}
                )
                response = redirect("cas_server:login")
                response.delete_cookie("remember_provider")
                return response
    except FederatedIendityProvider.DoesNotExist:
        logger.warning("Identity provider suffix %s not found" % provider)
        # if the identity provider is not found, redirect to the login page
        return redirect("cas_server:login")
python
method called on GET request :param django.http.HttpRequest request: The current request object :param unicode provider: Optional parameter. The user provider suffix.
[ "method", "called", "on", "GET", "request" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L303-L392
train
nitmir/django-cas-server
cas_server/views.py
LoginView.init_post
def init_post(self, request):
    """
    Initialize POST received parameters

    :param django.http.HttpRequest request: The current request object
    """
    self.request = request
    self.service = request.POST.get('service')
    self.renew = bool(request.POST.get('renew') and request.POST['renew'] != "False")
    self.gateway = request.POST.get('gateway')
    self.method = request.POST.get('method')
    self.ajax = settings.CAS_ENABLE_AJAX_AUTH and 'HTTP_X_AJAX' in request.META
    if request.POST.get('warned') and request.POST['warned'] != "False":
        self.warned = True
    self.warn = request.POST.get('warn')
    if settings.CAS_FEDERATE:
        self.username = request.POST.get('username')
        # in federated mode, the validated identity provider CAS ticket is used as password
        self.ticket = request.POST.get('password')
python
Initialize POST received parameters :param django.http.HttpRequest request: The current request object
[ "Initialize", "POST", "received", "parameters" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L442-L460
train
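init_post and init_get (further down in this section) normalise the renew and warned parameters with the same idiom: the flag is true only when the parameter is present and is not the literal string "False", because form posts and query strings only ever carry text. A standalone restatement of that idiom, with a plain dict standing in for request.POST:

def as_flag(params, name):
    """Return True when `name` is present and is not the literal string "False"."""
    return bool(params.get(name) and params[name] != "False")

assert as_flag({"renew": "True"}, "renew") is True
assert as_flag({"renew": "False"}, "renew") is False  # explicit opt-out from the form
assert as_flag({}, "renew") is False                  # parameter absent
assert as_flag({"renew": ""}, "renew") is False       # empty string is falsy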
nitmir/django-cas-server
cas_server/views.py
LoginView.gen_lt
def gen_lt(self):
    """Generate a new LoginTicket and add it to the list of valid LT for the user"""
    self.request.session['lt'] = self.request.session.get('lt', []) + [utils.gen_lt()]
    if len(self.request.session['lt']) > 100:
        self.request.session['lt'] = self.request.session['lt'][-100:]
python
Generate a new LoginTicket and add it to the list of valid LT for the user
[ "Generate", "a", "new", "LoginTicket", "and", "add", "it", "to", "the", "list", "of", "valid", "LT", "for", "the", "user" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L462-L466
train
nitmir/django-cas-server
cas_server/views.py
LoginView.check_lt
def check_lt(self):
    """
    Check if the POSTed LoginTicket is valid, and if so invalidate it

    :return: ``True`` if the LoginTicket is valid, ``False`` otherwise
    :rtype: bool
    """
    # save LT for later check
    lt_valid = self.request.session.get('lt', [])
    lt_send = self.request.POST.get('lt')
    # generate a new LT (by POSTing, the previous LT has been consumed)
    self.gen_lt()
    # check if the sent LT is valid
    if lt_send not in lt_valid:
        return False
    else:
        self.request.session['lt'].remove(lt_send)
        # we need to reassign the list for django to detect that it has changed
        # and for its new value to be stored in the session
        self.request.session['lt'] = self.request.session['lt']
        return True
python
Check if the POSTed LoginTicket is valid, and if so invalidate it :return: ``True`` if the LoginTicket is valid, ``False`` otherwise :rtype: bool
[ "Check", "is", "the", "POSTed", "LoginTicket", "is", "valid", "if", "yes", "invalide", "it" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L468-L488
train
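Taken together, gen_lt and check_lt implement a consume-once LoginTicket: each rendering of the login form appends a fresh LT to the session (capped at the last 100), and a POSTed LT is accepted only if it is still in that list, after which it is removed so the same submission cannot be replayed. A minimal self-contained sketch of that lifecycle, with a plain dict standing in for the Django session and uuid4 standing in for utils.gen_lt:

import uuid

def gen_lt(session):
    """Append a fresh LoginTicket to the session, keeping only the last 100."""
    session['lt'] = session.get('lt', []) + ["LT-%s" % uuid.uuid4().hex]
    session['lt'] = session['lt'][-100:]
    return session['lt'][-1]

def check_lt(session, lt_send):
    """Accept a LoginTicket at most once: valid only while still listed, then dropped."""
    lt_valid = session.get('lt', [])
    gen_lt(session)  # a new LT is always issued for the next form rendering
    if lt_send not in lt_valid:
        return False
    session['lt'].remove(lt_send)
    return True

session = {}
ticket = gen_lt(session)               # the LT rendered into the login form
assert check_lt(session, ticket)       # first submission is accepted
assert not check_lt(session, ticket)   # replaying the same LT is rejected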
nitmir/django-cas-server
cas_server/views.py
LoginView.init_get
def init_get(self, request):
    """
    Initialize GET received parameters

    :param django.http.HttpRequest request: The current request object
    """
    self.request = request
    self.service = request.GET.get('service')
    self.renew = bool(request.GET.get('renew') and request.GET['renew'] != "False")
    self.gateway = request.GET.get('gateway')
    self.method = request.GET.get('method')
    self.ajax = settings.CAS_ENABLE_AJAX_AUTH and 'HTTP_X_AJAX' in request.META
    self.warn = request.GET.get('warn')
    if settings.CAS_FEDERATE:
        # here username and ticket are fetched from the session after a redirection from
        # FederateAuth.get
        self.username = request.session.get("federate_username")
        self.ticket = request.session.get("federate_ticket")
        if self.username:
            del request.session["federate_username"]
        if self.ticket:
            del request.session["federate_ticket"]
python
Initialize GET received parameters :param django.http.HttpRequest request: The current request object
[ "Initialize", "GET", "received", "parameters" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L583-L604
train
nitmir/django-cas-server
cas_server/views.py
LoginView.process_get
def process_get(self):
    """
    Analyse the GET request

    :return:
        * :attr:`USER_NOT_AUTHENTICATED` if the user is not authenticated or is requesting
          for authentication renewal
        * :attr:`USER_AUTHENTICATED` if the user is authenticated and is not requesting
          for authentication renewal
    :rtype: int
    """
    # generate a new LT
    self.gen_lt()
    if not self.request.session.get("authenticated") or self.renew:
        # authentication will be needed, initialize the form to use
        self.init_form()
        return self.USER_NOT_AUTHENTICATED
    return self.USER_AUTHENTICATED
python
Analyse the GET request :return: * :attr:`USER_NOT_AUTHENTICATED` if the user is not authenticated or is requesting for authentication renewal * :attr:`USER_AUTHENTICATED` if the user is authenticated and is not requesting for authentication renewal :rtype: int
[ "Analyse", "the", "GET", "request" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L619-L636
train
nitmir/django-cas-server
cas_server/views.py
LoginView.init_form
def init_form(self, values=None):
    """
    Initialize the appropriate form depending on POST and GET parameters

    :param django.http.QueryDict values: A POST or GET QueryDict
    """
    if values:
        values = values.copy()
        values['lt'] = self.request.session['lt'][-1]
    form_initial = {
        'service': self.service,
        'method': self.method,
        'warn': (
            self.warn or
            self.request.session.get("warn") or
            self.request.COOKIES.get('warn')
        ),
        'lt': self.request.session['lt'][-1],
        'renew': self.renew
    }
    if settings.CAS_FEDERATE:
        if self.username and self.ticket:
            form_initial['username'] = self.username
            form_initial['password'] = self.ticket
            form_initial['ticket'] = self.ticket
            self.form = forms.FederateUserCredential(
                values,
                initial=form_initial
            )
        else:
            self.form = forms.FederateSelect(values, initial=form_initial)
    else:
        self.form = forms.UserCredential(
            values,
            initial=form_initial
        )
python
Initialize the appropriate form depending on POST and GET parameters :param django.http.QueryDict values: A POST or GET QueryDict
[ "Initialization", "of", "the", "good", "form", "depending", "of", "POST", "and", "GET", "parameters" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L638-L671
train
nitmir/django-cas-server
cas_server/views.py
LoginView.service_login
def service_login(self):
    """
    Perform login against a service

    :return:
        * The rendering of the ``settings.CAS_WARN_TEMPLATE`` if the user asked to be
          warned before ticket emission and has not yet been warned.
        * The redirection to the service URL with a ticket GET parameter
        * The redirection to the service URL without a ticket if ticket generation failed
          and the :attr:`gateway` attribute is set
        * The rendering of the ``settings.CAS_LOGGED_TEMPLATE`` template with some error
          messages if the ticket generation failed (e.g: user not allowed).
    :rtype: django.http.HttpResponse
    """
    try:
        # is the service allowed
        service_pattern = ServicePattern.validate(self.service)
        # is the current user allowed on this service
        service_pattern.check_user(self.user)
        # if the user has asked to be warned before any login to a service
        if self.request.session.get("warn", True) and not self.warned:
            messages.add_message(
                self.request,
                messages.WARNING,
                _(u"Authentication has been required by service %(name)s (%(url)s)") %
                {'name': service_pattern.name, 'url': self.service}
            )
            if self.ajax:
                data = {"status": "error", "detail": "confirmation needed"}
                return json_response(self.request, data)
            else:
                warn_form = forms.WarnForm(initial={
                    'service': self.service,
                    'renew': self.renew,
                    'gateway': self.gateway,
                    'method': self.method,
                    'warned': True,
                    'lt': self.request.session['lt'][-1]
                })
                return render(
                    self.request,
                    settings.CAS_WARN_TEMPLATE,
                    utils.context({'form': warn_form})
                )
        else:
            # redirect, using method ?
            list(messages.get_messages(self.request))  # clean messages before leaving django
            redirect_url = self.user.get_service_url(
                self.service,
                service_pattern,
                renew=self.renewed
            )
            if not self.ajax:
                return HttpResponseRedirect(redirect_url)
            else:
                data = {"status": "success", "detail": "auth", "url": redirect_url}
                return json_response(self.request, data)
    except ServicePattern.DoesNotExist:
        error = 1
        messages.add_message(
            self.request,
            messages.ERROR,
            _(u'Service %(url)s not allowed.') % {'url': self.service}
        )
    except models.BadUsername:
        error = 2
        messages.add_message(
            self.request,
            messages.ERROR,
            _(u"Username not allowed")
        )
    except models.BadFilter:
        error = 3
        messages.add_message(
            self.request,
            messages.ERROR,
            _(u"User characteristics not allowed")
        )
    except models.UserFieldNotDefined:
        error = 4
        messages.add_message(
            self.request,
            messages.ERROR,
            _(u"The attribute %(field)s is needed to use"
              u" that service") % {'field': service_pattern.user_field}
        )

    # if gateway is set and auth failed redirect to the service without authentication
    if self.gateway and not self.ajax:
        list(messages.get_messages(self.request))  # clean messages before leaving django
        return HttpResponseRedirect(self.service)

    if not self.ajax:
        return render(
            self.request,
            settings.CAS_LOGGED_TEMPLATE,
            utils.context({'session': self.request.session})
        )
    else:
        data = {"status": "error", "detail": "auth", "code": error}
        return json_response(self.request, data)
python
Perform login against a service :return: * The rendering of the ``settings.CAS_WARN_TEMPLATE`` if the user asked to be warned before ticket emission and has not yet been warned. * The redirection to the service URL with a ticket GET parameter * The redirection to the service URL without a ticket if ticket generation failed and the :attr:`gateway` attribute is set * The rendering of the ``settings.CAS_LOGGED_TEMPLATE`` template with some error messages if the ticket generation failed (e.g: user not allowed). :rtype: django.http.HttpResponse
[ "Perform", "login", "against", "a", "service" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L673-L773
train
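When ticket emission fails, service_login answers AJAX callers with a JSON body {"status": "error", "detail": "auth", "code": ...} whose numeric code identifies which of the four rejection causes above was hit. Restated as a small lookup, with the numbers taken directly from the except clauses in this record:

# numeric `code` values carried by the AJAX error payload of service_login
SERVICE_LOGIN_ERRORS = {
    1: "ServicePattern.DoesNotExist: the service URL matches no allowed service pattern",
    2: "models.BadUsername: the username is rejected by the matched service pattern",
    3: "models.BadFilter: the user characteristics do not pass the pattern's filter",
    4: "models.UserFieldNotDefined: the attribute named by the pattern's user_field is missing",
}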
nitmir/django-cas-server
cas_server/views.py
LoginView.authenticated
def authenticated(self):
    """
    Processing authenticated users

    :return:
        * The returned value of :meth:`service_login` if :attr:`service` is defined
        * The rendering of ``settings.CAS_LOGGED_TEMPLATE`` otherwise
    :rtype: django.http.HttpResponse
    """
    # Try to get the current :class:`models.User<cas_server.models.User>` object for the
    # current session
    try:
        self.user = models.User.objects.get(
            username=self.request.session.get("username"),
            session_key=self.request.session.session_key
        )
    # if not found, flush the session and redirect to the login page
    except models.User.DoesNotExist:
        logger.warning(
            "User %s seems authenticated but is not found in the database." % (
                self.request.session.get("username"),
            )
        )
        self.logout()
        if self.ajax:
            data = {
                "status": "error",
                "detail": "login required",
                "url": utils.reverse_params("cas_server:login", params=self.request.GET)
            }
            return json_response(self.request, data)
        else:
            return utils.redirect_params("cas_server:login", params=self.request.GET)
    # if login against a service
    if self.service:
        return self.service_login()
    # else display the logged template
    else:
        if self.ajax:
            data = {"status": "success", "detail": "logged"}
            return json_response(self.request, data)
        else:
            return render(
                self.request,
                settings.CAS_LOGGED_TEMPLATE,
                utils.context({'session': self.request.session})
            )
python
Processing authenticated users :return: * The returned value of :meth:`service_login` if :attr:`service` is defined * The rendering of ``settings.CAS_LOGGED_TEMPLATE`` otherwise :rtype: django.http.HttpResponse
[ "Processing", "authenticated", "users" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L775-L822
train
nitmir/django-cas-server
cas_server/views.py
LoginView.not_authenticated
def not_authenticated(self):
    """
    Processing non authenticated users

    :return:
        * The rendering of ``settings.CAS_LOGIN_TEMPLATE`` with various messages depending
          on GET/POST parameters
        * The redirection to :class:`FederateAuth` if ``settings.CAS_FEDERATE`` is ``True``
          and the "remember my identity provider" cookie is found
    :rtype: django.http.HttpResponse
    """
    if self.service:
        try:
            service_pattern = ServicePattern.validate(self.service)
            if self.gateway and not self.ajax:
                # clean messages before leaving django
                list(messages.get_messages(self.request))
                return HttpResponseRedirect(self.service)
            if settings.CAS_SHOW_SERVICE_MESSAGES:
                if self.request.session.get("authenticated") and self.renew:
                    messages.add_message(
                        self.request,
                        messages.WARNING,
                        _(u"Authentication renewal required by service %(name)s (%(url)s).") %
                        {'name': service_pattern.name, 'url': self.service}
                    )
                else:
                    messages.add_message(
                        self.request,
                        messages.WARNING,
                        _(u"Authentication required by service %(name)s (%(url)s).") %
                        {'name': service_pattern.name, 'url': self.service}
                    )
        except ServicePattern.DoesNotExist:
            if settings.CAS_SHOW_SERVICE_MESSAGES:
                messages.add_message(
                    self.request,
                    messages.ERROR,
                    _(u'Service %s not allowed') % self.service
                )
    if self.ajax:
        data = {
            "status": "error",
            "detail": "login required",
            "url": utils.reverse_params("cas_server:login", params=self.request.GET)
        }
        return json_response(self.request, data)
    else:
        if settings.CAS_FEDERATE:
            if self.username and self.ticket:
                return render(
                    self.request,
                    settings.CAS_LOGIN_TEMPLATE,
                    utils.context({
                        'form': self.form,
                        'auto_submit': True,
                        'post_url': reverse("cas_server:login")
                    })
                )
            else:
                if (
                    self.request.COOKIES.get('remember_provider') and
                    FederatedIendityProvider.objects.filter(
                        suffix=self.request.COOKIES['remember_provider']
                    )
                ):
                    params = utils.copy_params(self.request.GET)
                    url = utils.reverse_params(
                        "cas_server:federateAuth",
                        params=params,
                        kwargs=dict(provider=self.request.COOKIES['remember_provider'])
                    )
                    return HttpResponseRedirect(url)
                else:
                    # if user is authenticated and auth renewal is requested, redirect directly
                    # to the user identity provider
                    if self.renew and self.request.session.get("authenticated"):
                        try:
                            user = FederatedUser.get_from_federated_username(
                                self.request.session.get("username")
                            )
                            params = utils.copy_params(self.request.GET)
                            url = utils.reverse_params(
                                "cas_server:federateAuth",
                                params=params,
                                kwargs=dict(provider=user.provider.suffix)
                            )
                            return HttpResponseRedirect(url)
                        # Should normally not happen: if the user is logged, it exists in the
                        # database.
                        except FederatedUser.DoesNotExist:  # pragma: no cover
                            pass
                    return render(
                        self.request,
                        settings.CAS_LOGIN_TEMPLATE,
                        utils.context({
                            'form': self.form,
                            'post_url': reverse("cas_server:federateAuth")
                        })
                    )
        else:
            return render(
                self.request,
                settings.CAS_LOGIN_TEMPLATE,
                utils.context({'form': self.form})
            )
python
[ "def", "not_authenticated", "(", "self", ")", ":", "if", "self", ".", "service", ":", "try", ":", "service_pattern", "=", "ServicePattern", ".", "validate", "(", "self", ".", "service", ")", "if", "self", ".", "gateway", "and", "not", "self", ".", "ajax", ":", "# clean messages before leaving django", "list", "(", "messages", ".", "get_messages", "(", "self", ".", "request", ")", ")", "return", "HttpResponseRedirect", "(", "self", ".", "service", ")", "if", "settings", ".", "CAS_SHOW_SERVICE_MESSAGES", ":", "if", "self", ".", "request", ".", "session", ".", "get", "(", "\"authenticated\"", ")", "and", "self", ".", "renew", ":", "messages", ".", "add_message", "(", "self", ".", "request", ",", "messages", ".", "WARNING", ",", "_", "(", "u\"Authentication renewal required by service %(name)s (%(url)s).\"", ")", "%", "{", "'name'", ":", "service_pattern", ".", "name", ",", "'url'", ":", "self", ".", "service", "}", ")", "else", ":", "messages", ".", "add_message", "(", "self", ".", "request", ",", "messages", ".", "WARNING", ",", "_", "(", "u\"Authentication required by service %(name)s (%(url)s).\"", ")", "%", "{", "'name'", ":", "service_pattern", ".", "name", ",", "'url'", ":", "self", ".", "service", "}", ")", "except", "ServicePattern", ".", "DoesNotExist", ":", "if", "settings", ".", "CAS_SHOW_SERVICE_MESSAGES", ":", "messages", ".", "add_message", "(", "self", ".", "request", ",", "messages", ".", "ERROR", ",", "_", "(", "u'Service %s not allowed'", ")", "%", "self", ".", "service", ")", "if", "self", ".", "ajax", ":", "data", "=", "{", "\"status\"", ":", "\"error\"", ",", "\"detail\"", ":", "\"login required\"", ",", "\"url\"", ":", "utils", ".", "reverse_params", "(", "\"cas_server:login\"", ",", "params", "=", "self", ".", "request", ".", "GET", ")", "}", "return", "json_response", "(", "self", ".", "request", ",", "data", ")", "else", ":", "if", "settings", ".", "CAS_FEDERATE", ":", "if", "self", ".", "username", "and", "self", ".", "ticket", ":", "return", "render", "(", "self", ".", "request", ",", "settings", ".", "CAS_LOGIN_TEMPLATE", ",", "utils", ".", "context", "(", "{", "'form'", ":", "self", ".", "form", ",", "'auto_submit'", ":", "True", ",", "'post_url'", ":", "reverse", "(", "\"cas_server:login\"", ")", "}", ")", ")", "else", ":", "if", "(", "self", ".", "request", ".", "COOKIES", ".", "get", "(", "'remember_provider'", ")", "and", "FederatedIendityProvider", ".", "objects", ".", "filter", "(", "suffix", "=", "self", ".", "request", ".", "COOKIES", "[", "'remember_provider'", "]", ")", ")", ":", "params", "=", "utils", ".", "copy_params", "(", "self", ".", "request", ".", "GET", ")", "url", "=", "utils", ".", "reverse_params", "(", "\"cas_server:federateAuth\"", ",", "params", "=", "params", ",", "kwargs", "=", "dict", "(", "provider", "=", "self", ".", "request", ".", "COOKIES", "[", "'remember_provider'", "]", ")", ")", "return", "HttpResponseRedirect", "(", "url", ")", "else", ":", "# if user is authenticated and auth renewal is requested, redirect directly", "# to the user identity provider", "if", "self", ".", "renew", "and", "self", ".", "request", ".", "session", ".", "get", "(", "\"authenticated\"", ")", ":", "try", ":", "user", "=", "FederatedUser", ".", "get_from_federated_username", "(", "self", ".", "request", ".", "session", ".", "get", "(", "\"username\"", ")", ")", "params", "=", "utils", ".", "copy_params", "(", "self", ".", "request", ".", "GET", ")", "url", "=", "utils", ".", "reverse_params", "(", "\"cas_server:federateAuth\"", ",", "params", "=", 
"params", ",", "kwargs", "=", "dict", "(", "provider", "=", "user", ".", "provider", ".", "suffix", ")", ")", "return", "HttpResponseRedirect", "(", "url", ")", "# Should normally not happen: if the user is logged, it exists in the", "# database.", "except", "FederatedUser", ".", "DoesNotExist", ":", "# pragma: no cover", "pass", "return", "render", "(", "self", ".", "request", ",", "settings", ".", "CAS_LOGIN_TEMPLATE", ",", "utils", ".", "context", "(", "{", "'form'", ":", "self", ".", "form", ",", "'post_url'", ":", "reverse", "(", "\"cas_server:federateAuth\"", ")", "}", ")", ")", "else", ":", "return", "render", "(", "self", ".", "request", ",", "settings", ".", "CAS_LOGIN_TEMPLATE", ",", "utils", ".", "context", "(", "{", "'form'", ":", "self", ".", "form", "}", ")", ")" ]
Processing non authenticated users :return: * The rendering of ``settings.CAS_LOGIN_TEMPLATE`` with various messages depending on GET/POST parameters * The redirection to :class:`FederateAuth` if ``settings.CAS_FEDERATE`` is ``True`` and the "remember my identity provider" cookie is found :rtype: django.http.HttpResponse
[ "Processing", "non", "authenticated", "users" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L824-L930
train
nitmir/django-cas-server
cas_server/views.py
LoginView.common
def common(self): """ Common part execute uppon GET and POST request :return: * The returned value of :meth:`authenticated` if the user is authenticated and not requesting for authentication or if the authentication has just been renewed * The returned value of :meth:`not_authenticated` otherwise :rtype: django.http.HttpResponse """ # if authenticated and successfully renewed authentication if needed if self.request.session.get("authenticated") and (not self.renew or self.renewed): return self.authenticated() else: return self.not_authenticated()
python
def common(self): """ Common part execute uppon GET and POST request :return: * The returned value of :meth:`authenticated` if the user is authenticated and not requesting for authentication or if the authentication has just been renewed * The returned value of :meth:`not_authenticated` otherwise :rtype: django.http.HttpResponse """ # if authenticated and successfully renewed authentication if needed if self.request.session.get("authenticated") and (not self.renew or self.renewed): return self.authenticated() else: return self.not_authenticated()
[ "def", "common", "(", "self", ")", ":", "# if authenticated and successfully renewed authentication if needed", "if", "self", ".", "request", ".", "session", ".", "get", "(", "\"authenticated\"", ")", "and", "(", "not", "self", ".", "renew", "or", "self", ".", "renewed", ")", ":", "return", "self", ".", "authenticated", "(", ")", "else", ":", "return", "self", ".", "not_authenticated", "(", ")" ]
Common part executed upon GET and POST requests :return: * The returned value of :meth:`authenticated` if the user is authenticated and not requesting an authentication renewal, or if the authentication has just been renewed * The returned value of :meth:`not_authenticated` otherwise :rtype: django.http.HttpResponse
[ "Common", "part", "execute", "uppon", "GET", "and", "POST", "request" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L932-L946
train
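The dispatch in LoginView.common above reduces to one small boolean decision. The sketch below restates it as a plain function so the renew/renewed interplay is easy to test; the `session`, `renew` and `renewed` names stand in for the view state and are illustrative, not the real Django objects.

# Minimal sketch of the branch LoginView.common takes, assuming the session
# behaves like a plain dict.
def pick_branch(session, renew, renewed):
    """Return the handler LoginView.common would dispatch to."""
    if session.get("authenticated") and (not renew or renewed):
        return "authenticated"
    return "not_authenticated"

assert pick_branch({"authenticated": True}, renew=False, renewed=False) == "authenticated"
assert pick_branch({"authenticated": True}, renew=True, renewed=True) == "authenticated"
assert pick_branch({"authenticated": True}, renew=True, renewed=False) == "not_authenticated"
assert pick_branch({}, renew=False, renewed=False) == "not_authenticated"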
nitmir/django-cas-server
cas_server/views.py
ValidateService.process_ticket
def process_ticket(self): """ fetch the ticket against the database and check its validity :raises ValidateError: if the ticket is not found or not valid, potentially for that service :returns: A couple (ticket, proxies list) :rtype: :obj:`tuple` """ try: proxies = [] if self.allow_proxy_ticket: ticket = models.Ticket.get(self.ticket, self.renew) else: ticket = models.ServiceTicket.get(self.ticket, self.renew) try: for prox in ticket.proxies.all(): proxies.append(prox.url) except AttributeError: pass if ticket.service != self.service: raise ValidateError(u'INVALID_SERVICE', self.service) return ticket, proxies except Ticket.DoesNotExist: raise ValidateError(u'INVALID_TICKET', self.ticket) except (ServiceTicket.DoesNotExist, ProxyTicket.DoesNotExist): raise ValidateError(u'INVALID_TICKET', 'ticket not found')
python
def process_ticket(self): """ fetch the ticket against the database and check its validity :raises ValidateError: if the ticket is not found or not valid, potentially for that service :returns: A couple (ticket, proxies list) :rtype: :obj:`tuple` """ try: proxies = [] if self.allow_proxy_ticket: ticket = models.Ticket.get(self.ticket, self.renew) else: ticket = models.ServiceTicket.get(self.ticket, self.renew) try: for prox in ticket.proxies.all(): proxies.append(prox.url) except AttributeError: pass if ticket.service != self.service: raise ValidateError(u'INVALID_SERVICE', self.service) return ticket, proxies except Ticket.DoesNotExist: raise ValidateError(u'INVALID_TICKET', self.ticket) except (ServiceTicket.DoesNotExist, ProxyTicket.DoesNotExist): raise ValidateError(u'INVALID_TICKET', 'ticket not found')
[ "def", "process_ticket", "(", "self", ")", ":", "try", ":", "proxies", "=", "[", "]", "if", "self", ".", "allow_proxy_ticket", ":", "ticket", "=", "models", ".", "Ticket", ".", "get", "(", "self", ".", "ticket", ",", "self", ".", "renew", ")", "else", ":", "ticket", "=", "models", ".", "ServiceTicket", ".", "get", "(", "self", ".", "ticket", ",", "self", ".", "renew", ")", "try", ":", "for", "prox", "in", "ticket", ".", "proxies", ".", "all", "(", ")", ":", "proxies", ".", "append", "(", "prox", ".", "url", ")", "except", "AttributeError", ":", "pass", "if", "ticket", ".", "service", "!=", "self", ".", "service", ":", "raise", "ValidateError", "(", "u'INVALID_SERVICE'", ",", "self", ".", "service", ")", "return", "ticket", ",", "proxies", "except", "Ticket", ".", "DoesNotExist", ":", "raise", "ValidateError", "(", "u'INVALID_TICKET'", ",", "self", ".", "ticket", ")", "except", "(", "ServiceTicket", ".", "DoesNotExist", ",", "ProxyTicket", ".", "DoesNotExist", ")", ":", "raise", "ValidateError", "(", "u'INVALID_TICKET'", ",", "'ticket not found'", ")" ]
fetch the ticket against the database and check its validity :raises ValidateError: if the ticket is not found or not valid, potentially for that service :returns: A couple (ticket, proxies list) :rtype: :obj:`tuple`
[ "fetch", "the", "ticket", "against", "the", "database", "and", "check", "its", "validity" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L1191-L1217
train
nitmir/django-cas-server
cas_server/views.py
ValidateService.process_pgturl
def process_pgturl(self, params): """ Handle PGT request :param dict params: A template context dict :raises ValidateError: if pgtUrl is invalid or if TLS validation of the pgtUrl fails :return: The rendering of ``cas_server/serviceValidate.xml``, using ``params`` :rtype: django.http.HttpResponse """ try: pattern = ServicePattern.validate(self.pgt_url) if pattern.proxy_callback: proxyid = utils.gen_pgtiou() pticket = ProxyGrantingTicket.objects.create( user=self.ticket.user, service=self.pgt_url, service_pattern=pattern, single_log_out=pattern.single_log_out ) url = utils.update_url(self.pgt_url, {'pgtIou': proxyid, 'pgtId': pticket.value}) try: ret = requests.get(url, verify=settings.CAS_PROXY_CA_CERTIFICATE_PATH) if ret.status_code == 200: params['proxyGrantingTicket'] = proxyid else: pticket.delete() logger.info( ( "ValidateService: ticket %s validated for user %s on service %s. " "Proxy Granting Ticket transmited to %s." ) % ( self.ticket.value, self.ticket.user.username, self.ticket.service, self.pgt_url ) ) logger.debug( "ValidateService: User attributs are:\n%s" % ( pprint.pformat(self.ticket.attributs), ) ) return render( self.request, "cas_server/serviceValidate.xml", params, content_type="text/xml; charset=utf-8" ) except requests.exceptions.RequestException as error: error = utils.unpack_nested_exception(error) raise ValidateError( u'INVALID_PROXY_CALLBACK', u"%s: %s" % (type(error), str(error)) ) else: raise ValidateError( u'INVALID_PROXY_CALLBACK', u"callback url not allowed by configuration" ) except ServicePattern.DoesNotExist: raise ValidateError( u'INVALID_PROXY_CALLBACK', u'callback url not allowed by configuration' )
python
def process_pgturl(self, params): """ Handle PGT request :param dict params: A template context dict :raises ValidateError: if pgtUrl is invalid or if TLS validation of the pgtUrl fails :return: The rendering of ``cas_server/serviceValidate.xml``, using ``params`` :rtype: django.http.HttpResponse """ try: pattern = ServicePattern.validate(self.pgt_url) if pattern.proxy_callback: proxyid = utils.gen_pgtiou() pticket = ProxyGrantingTicket.objects.create( user=self.ticket.user, service=self.pgt_url, service_pattern=pattern, single_log_out=pattern.single_log_out ) url = utils.update_url(self.pgt_url, {'pgtIou': proxyid, 'pgtId': pticket.value}) try: ret = requests.get(url, verify=settings.CAS_PROXY_CA_CERTIFICATE_PATH) if ret.status_code == 200: params['proxyGrantingTicket'] = proxyid else: pticket.delete() logger.info( ( "ValidateService: ticket %s validated for user %s on service %s. " "Proxy Granting Ticket transmited to %s." ) % ( self.ticket.value, self.ticket.user.username, self.ticket.service, self.pgt_url ) ) logger.debug( "ValidateService: User attributs are:\n%s" % ( pprint.pformat(self.ticket.attributs), ) ) return render( self.request, "cas_server/serviceValidate.xml", params, content_type="text/xml; charset=utf-8" ) except requests.exceptions.RequestException as error: error = utils.unpack_nested_exception(error) raise ValidateError( u'INVALID_PROXY_CALLBACK', u"%s: %s" % (type(error), str(error)) ) else: raise ValidateError( u'INVALID_PROXY_CALLBACK', u"callback url not allowed by configuration" ) except ServicePattern.DoesNotExist: raise ValidateError( u'INVALID_PROXY_CALLBACK', u'callback url not allowed by configuration' )
[ "def", "process_pgturl", "(", "self", ",", "params", ")", ":", "try", ":", "pattern", "=", "ServicePattern", ".", "validate", "(", "self", ".", "pgt_url", ")", "if", "pattern", ".", "proxy_callback", ":", "proxyid", "=", "utils", ".", "gen_pgtiou", "(", ")", "pticket", "=", "ProxyGrantingTicket", ".", "objects", ".", "create", "(", "user", "=", "self", ".", "ticket", ".", "user", ",", "service", "=", "self", ".", "pgt_url", ",", "service_pattern", "=", "pattern", ",", "single_log_out", "=", "pattern", ".", "single_log_out", ")", "url", "=", "utils", ".", "update_url", "(", "self", ".", "pgt_url", ",", "{", "'pgtIou'", ":", "proxyid", ",", "'pgtId'", ":", "pticket", ".", "value", "}", ")", "try", ":", "ret", "=", "requests", ".", "get", "(", "url", ",", "verify", "=", "settings", ".", "CAS_PROXY_CA_CERTIFICATE_PATH", ")", "if", "ret", ".", "status_code", "==", "200", ":", "params", "[", "'proxyGrantingTicket'", "]", "=", "proxyid", "else", ":", "pticket", ".", "delete", "(", ")", "logger", ".", "info", "(", "(", "\"ValidateService: ticket %s validated for user %s on service %s. \"", "\"Proxy Granting Ticket transmited to %s.\"", ")", "%", "(", "self", ".", "ticket", ".", "value", ",", "self", ".", "ticket", ".", "user", ".", "username", ",", "self", ".", "ticket", ".", "service", ",", "self", ".", "pgt_url", ")", ")", "logger", ".", "debug", "(", "\"ValidateService: User attributs are:\\n%s\"", "%", "(", "pprint", ".", "pformat", "(", "self", ".", "ticket", ".", "attributs", ")", ",", ")", ")", "return", "render", "(", "self", ".", "request", ",", "\"cas_server/serviceValidate.xml\"", ",", "params", ",", "content_type", "=", "\"text/xml; charset=utf-8\"", ")", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "error", ":", "error", "=", "utils", ".", "unpack_nested_exception", "(", "error", ")", "raise", "ValidateError", "(", "u'INVALID_PROXY_CALLBACK'", ",", "u\"%s: %s\"", "%", "(", "type", "(", "error", ")", ",", "str", "(", "error", ")", ")", ")", "else", ":", "raise", "ValidateError", "(", "u'INVALID_PROXY_CALLBACK'", ",", "u\"callback url not allowed by configuration\"", ")", "except", "ServicePattern", ".", "DoesNotExist", ":", "raise", "ValidateError", "(", "u'INVALID_PROXY_CALLBACK'", ",", "u'callback url not allowed by configuration'", ")" ]
Handle PGT request :param dict params: A template context dict :raises ValidateError: if pgtUrl is invalid or if TLS validation of the pgtUrl fails :return: The rendering of ``cas_server/serviceValidate.xml``, using ``params`` :rtype: django.http.HttpResponse
[ "Handle", "PGT", "request" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L1219-L1282
train
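process_pgturl above appends a pgtIou/pgtId pair to the service's proxy callback URL and only hands the pgtIou back when an HTTPS GET on that URL answers 200. The standard-library sketch below approximates what cas_server's utils.update_url does; the URL and ticket values are made up, and the real helper may encode parameters differently.

from urllib.parse import urlencode, urlparse, parse_qsl, urlunparse

def add_params(url, extra):
    """Append query parameters to a URL, keeping any existing ones."""
    parts = urlparse(url)
    query = dict(parse_qsl(parts.query))
    query.update(extra)
    return urlunparse(parts._replace(query=urlencode(query)))

callback = add_params(
    "https://app.example.com/pgt-callback",
    {"pgtIou": "PGTIOU-example", "pgtId": "PGT-example"},
)
print(callback)
# The CAS server then issues a GET on `callback` (with certificate checking
# controlled by CAS_PROXY_CA_CERTIFICATE_PATH) and only returns the pgtIou
# to the service if that request answers with HTTP 200.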
nitmir/django-cas-server
cas_server/views.py
Proxy.process_proxy
def process_proxy(self): """ handle PT request :raises ValidateError: if the PGT is not found, or the target service not allowed or the user not allowed on the tardet service. :return: The rendering of ``cas_server/proxy.xml`` :rtype: django.http.HttpResponse """ try: # is the target service allowed pattern = ServicePattern.validate(self.target_service) # to get a proxy ticket require that the service allow it if not pattern.proxy: raise ValidateError( u'UNAUTHORIZED_SERVICE', u'the service %s does not allow proxy tickets' % self.target_service ) # is the proxy granting ticket valid ticket = ProxyGrantingTicket.get(self.pgt) # is the pgt user allowed on the target service pattern.check_user(ticket.user) pticket = ticket.user.get_ticket( ProxyTicket, self.target_service, pattern, renew=False ) models.Proxy.objects.create(proxy_ticket=pticket, url=ticket.service) logger.info( "Proxy ticket created for user %s on service %s." % ( ticket.user.username, self.target_service ) ) return render( self.request, "cas_server/proxy.xml", {'ticket': pticket.value}, content_type="text/xml; charset=utf-8" ) except (Ticket.DoesNotExist, ProxyGrantingTicket.DoesNotExist): raise ValidateError(u'INVALID_TICKET', u'PGT %s not found' % self.pgt) except ServicePattern.DoesNotExist: raise ValidateError(u'UNAUTHORIZED_SERVICE', self.target_service) except (models.BadUsername, models.BadFilter, models.UserFieldNotDefined): raise ValidateError( u'UNAUTHORIZED_USER', u'User %s not allowed on %s' % (ticket.user.username, self.target_service) )
python
def process_proxy(self): """ handle PT request :raises ValidateError: if the PGT is not found, or the target service not allowed or the user not allowed on the tardet service. :return: The rendering of ``cas_server/proxy.xml`` :rtype: django.http.HttpResponse """ try: # is the target service allowed pattern = ServicePattern.validate(self.target_service) # to get a proxy ticket require that the service allow it if not pattern.proxy: raise ValidateError( u'UNAUTHORIZED_SERVICE', u'the service %s does not allow proxy tickets' % self.target_service ) # is the proxy granting ticket valid ticket = ProxyGrantingTicket.get(self.pgt) # is the pgt user allowed on the target service pattern.check_user(ticket.user) pticket = ticket.user.get_ticket( ProxyTicket, self.target_service, pattern, renew=False ) models.Proxy.objects.create(proxy_ticket=pticket, url=ticket.service) logger.info( "Proxy ticket created for user %s on service %s." % ( ticket.user.username, self.target_service ) ) return render( self.request, "cas_server/proxy.xml", {'ticket': pticket.value}, content_type="text/xml; charset=utf-8" ) except (Ticket.DoesNotExist, ProxyGrantingTicket.DoesNotExist): raise ValidateError(u'INVALID_TICKET', u'PGT %s not found' % self.pgt) except ServicePattern.DoesNotExist: raise ValidateError(u'UNAUTHORIZED_SERVICE', self.target_service) except (models.BadUsername, models.BadFilter, models.UserFieldNotDefined): raise ValidateError( u'UNAUTHORIZED_USER', u'User %s not allowed on %s' % (ticket.user.username, self.target_service) )
[ "def", "process_proxy", "(", "self", ")", ":", "try", ":", "# is the target service allowed", "pattern", "=", "ServicePattern", ".", "validate", "(", "self", ".", "target_service", ")", "# to get a proxy ticket require that the service allow it", "if", "not", "pattern", ".", "proxy", ":", "raise", "ValidateError", "(", "u'UNAUTHORIZED_SERVICE'", ",", "u'the service %s does not allow proxy tickets'", "%", "self", ".", "target_service", ")", "# is the proxy granting ticket valid", "ticket", "=", "ProxyGrantingTicket", ".", "get", "(", "self", ".", "pgt", ")", "# is the pgt user allowed on the target service", "pattern", ".", "check_user", "(", "ticket", ".", "user", ")", "pticket", "=", "ticket", ".", "user", ".", "get_ticket", "(", "ProxyTicket", ",", "self", ".", "target_service", ",", "pattern", ",", "renew", "=", "False", ")", "models", ".", "Proxy", ".", "objects", ".", "create", "(", "proxy_ticket", "=", "pticket", ",", "url", "=", "ticket", ".", "service", ")", "logger", ".", "info", "(", "\"Proxy ticket created for user %s on service %s.\"", "%", "(", "ticket", ".", "user", ".", "username", ",", "self", ".", "target_service", ")", ")", "return", "render", "(", "self", ".", "request", ",", "\"cas_server/proxy.xml\"", ",", "{", "'ticket'", ":", "pticket", ".", "value", "}", ",", "content_type", "=", "\"text/xml; charset=utf-8\"", ")", "except", "(", "Ticket", ".", "DoesNotExist", ",", "ProxyGrantingTicket", ".", "DoesNotExist", ")", ":", "raise", "ValidateError", "(", "u'INVALID_TICKET'", ",", "u'PGT %s not found'", "%", "self", ".", "pgt", ")", "except", "ServicePattern", ".", "DoesNotExist", ":", "raise", "ValidateError", "(", "u'UNAUTHORIZED_SERVICE'", ",", "self", ".", "target_service", ")", "except", "(", "models", ".", "BadUsername", ",", "models", ".", "BadFilter", ",", "models", ".", "UserFieldNotDefined", ")", ":", "raise", "ValidateError", "(", "u'UNAUTHORIZED_USER'", ",", "u'User %s not allowed on %s'", "%", "(", "ticket", ".", "user", ".", "username", ",", "self", ".", "target_service", ")", ")" ]
handle PT request :raises ValidateError: if the PGT is not found, the target service is not allowed, or the user is not allowed on the target service. :return: The rendering of ``cas_server/proxy.xml`` :rtype: django.http.HttpResponse
[ "handle", "PT", "request" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L1320-L1369
train
nitmir/django-cas-server
cas_server/views.py
SamlValidate.process_ticket
def process_ticket(self): """ validate ticket from SAML XML body :raises: SamlValidateError: if the ticket is not found or not valid, or if we fail to parse the posted XML. :return: a ticket object :rtype: :class:`models.Ticket<cas_server.models.Ticket>` """ try: auth_req = self.root.getchildren()[1].getchildren()[0] ticket = auth_req.getchildren()[0].text ticket = models.Ticket.get(ticket) if ticket.service != self.target: raise SamlValidateError( u'AuthnFailed', u'TARGET %s does not match ticket service' % self.target ) return ticket except (IndexError, KeyError): raise SamlValidateError(u'VersionMismatch') except Ticket.DoesNotExist: raise SamlValidateError( u'AuthnFailed', u'ticket %s should begin with PT- or ST-' % ticket ) except (ServiceTicket.DoesNotExist, ProxyTicket.DoesNotExist): raise SamlValidateError(u'AuthnFailed', u'ticket %s not found' % ticket)
python
def process_ticket(self): """ validate ticket from SAML XML body :raises: SamlValidateError: if the ticket is not found or not valid, or if we fail to parse the posted XML. :return: a ticket object :rtype: :class:`models.Ticket<cas_server.models.Ticket>` """ try: auth_req = self.root.getchildren()[1].getchildren()[0] ticket = auth_req.getchildren()[0].text ticket = models.Ticket.get(ticket) if ticket.service != self.target: raise SamlValidateError( u'AuthnFailed', u'TARGET %s does not match ticket service' % self.target ) return ticket except (IndexError, KeyError): raise SamlValidateError(u'VersionMismatch') except Ticket.DoesNotExist: raise SamlValidateError( u'AuthnFailed', u'ticket %s should begin with PT- or ST-' % ticket ) except (ServiceTicket.DoesNotExist, ProxyTicket.DoesNotExist): raise SamlValidateError(u'AuthnFailed', u'ticket %s not found' % ticket)
[ "def", "process_ticket", "(", "self", ")", ":", "try", ":", "auth_req", "=", "self", ".", "root", ".", "getchildren", "(", ")", "[", "1", "]", ".", "getchildren", "(", ")", "[", "0", "]", "ticket", "=", "auth_req", ".", "getchildren", "(", ")", "[", "0", "]", ".", "text", "ticket", "=", "models", ".", "Ticket", ".", "get", "(", "ticket", ")", "if", "ticket", ".", "service", "!=", "self", ".", "target", ":", "raise", "SamlValidateError", "(", "u'AuthnFailed'", ",", "u'TARGET %s does not match ticket service'", "%", "self", ".", "target", ")", "return", "ticket", "except", "(", "IndexError", ",", "KeyError", ")", ":", "raise", "SamlValidateError", "(", "u'VersionMismatch'", ")", "except", "Ticket", ".", "DoesNotExist", ":", "raise", "SamlValidateError", "(", "u'AuthnFailed'", ",", "u'ticket %s should begin with PT- or ST-'", "%", "ticket", ")", "except", "(", "ServiceTicket", ".", "DoesNotExist", ",", "ProxyTicket", ".", "DoesNotExist", ")", ":", "raise", "SamlValidateError", "(", "u'AuthnFailed'", ",", "u'ticket %s not found'", "%", "ticket", ")" ]
validate ticket from SAML XML body :raises: SamlValidateError: if the ticket is not found or not valid, or if we fail to parse the posted XML. :return: a ticket object :rtype: :class:`models.Ticket<cas_server.models.Ticket>`
[ "validate", "ticket", "from", "SAML", "XML", "body" ]
d106181b94c444f1946269da5c20f6c904840ad3
https://github.com/nitmir/django-cas-server/blob/d106181b94c444f1946269da5c20f6c904840ad3/cas_server/views.py#L1446-L1473
train
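The positional getchildren() indexing in SamlValidate.process_ticket is easier to follow on a concrete SAML 1.1 validate request. The snippet below builds a hand-written envelope and walks it the same way with the standard library; the namespaces follow the CAS SAML profile, but the exact payload a client posts may differ.

import xml.etree.ElementTree as ET

body = """<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/">
  <SOAP-ENV:Header/>
  <SOAP-ENV:Body>
    <samlp:Request xmlns:samlp="urn:oasis:names:tc:SAML:1.0:protocol">
      <samlp:AssertionArtifact>ST-0123456789-example</samlp:AssertionArtifact>
    </samlp:Request>
  </SOAP-ENV:Body>
</SOAP-ENV:Envelope>"""

root = ET.fromstring(body)
auth_req = list(root)[1][0]          # second child is the Body, then samlp:Request
ticket = list(auth_req)[0].text      # first child of the Request: the AssertionArtifact
print(ticket)                        # -> ST-0123456789-example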
pipermerriam/flex
flex/cli.py
main
def main(source): """ For a given command line supplied argument, negotiate the content, parse the schema and then return any issues to stdout or if no schema issues, return success exit code. """ if source is None: click.echo( "You need to supply a file or url to a schema to a swagger schema, for" "the validator to work." ) return 1 try: load(source) click.echo("Validation passed") return 0 except ValidationError as e: raise click.ClickException(str(e))
python
def main(source): """ For a given command line supplied argument, negotiate the content, parse the schema and then return any issues to stdout or if no schema issues, return success exit code. """ if source is None: click.echo( "You need to supply a file or url to a schema to a swagger schema, for" "the validator to work." ) return 1 try: load(source) click.echo("Validation passed") return 0 except ValidationError as e: raise click.ClickException(str(e))
[ "def", "main", "(", "source", ")", ":", "if", "source", "is", "None", ":", "click", ".", "echo", "(", "\"You need to supply a file or url to a schema to a swagger schema, for\"", "\"the validator to work.\"", ")", "return", "1", "try", ":", "load", "(", "source", ")", "click", ".", "echo", "(", "\"Validation passed\"", ")", "return", "0", "except", "ValidationError", "as", "e", ":", "raise", "click", ".", "ClickException", "(", "str", "(", "e", ")", ")" ]
For a given command-line supplied argument, negotiate the content, parse the schema, and then report any issues to stdout; if there are no schema issues, return a success exit code.
[ "For", "a", "given", "command", "line", "supplied", "argument", "negotiate", "the", "content", "parse", "the", "schema", "and", "then", "return", "any", "issues", "to", "stdout", "or", "if", "no", "schema", "issues", "return", "success", "exit", "code", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/cli.py#L12-L29
train
pipermerriam/flex
flex/core.py
load_source
def load_source(source): """ Common entry point for loading some form of raw swagger schema. Supports: - python object (dictionary-like) - path to yaml file - path to json file - file object (json or yaml). - json string. - yaml string. """ if isinstance(source, collections.Mapping): return deepcopy(source) elif hasattr(source, 'read') and callable(source.read): raw_source = source.read() elif os.path.exists(os.path.expanduser(str(source))): with open(os.path.expanduser(str(source)), 'r') as source_file: raw_source = source_file.read() elif isinstance(source, six.string_types): parts = urlparse.urlparse(source) if parts.scheme and parts.netloc: response = requests.get(source) if isinstance(response.content, six.binary_type): raw_source = six.text_type(response.content, encoding='utf-8') else: raw_source = response.content else: raw_source = source try: try: return json.loads(raw_source) except ValueError: pass try: return yaml.safe_load(raw_source) except (yaml.scanner.ScannerError, yaml.parser.ParserError): pass except NameError: pass raise ValueError( "Unable to parse `{0}`. Tried yaml and json.".format(source), )
python
def load_source(source): """ Common entry point for loading some form of raw swagger schema. Supports: - python object (dictionary-like) - path to yaml file - path to json file - file object (json or yaml). - json string. - yaml string. """ if isinstance(source, collections.Mapping): return deepcopy(source) elif hasattr(source, 'read') and callable(source.read): raw_source = source.read() elif os.path.exists(os.path.expanduser(str(source))): with open(os.path.expanduser(str(source)), 'r') as source_file: raw_source = source_file.read() elif isinstance(source, six.string_types): parts = urlparse.urlparse(source) if parts.scheme and parts.netloc: response = requests.get(source) if isinstance(response.content, six.binary_type): raw_source = six.text_type(response.content, encoding='utf-8') else: raw_source = response.content else: raw_source = source try: try: return json.loads(raw_source) except ValueError: pass try: return yaml.safe_load(raw_source) except (yaml.scanner.ScannerError, yaml.parser.ParserError): pass except NameError: pass raise ValueError( "Unable to parse `{0}`. Tried yaml and json.".format(source), )
[ "def", "load_source", "(", "source", ")", ":", "if", "isinstance", "(", "source", ",", "collections", ".", "Mapping", ")", ":", "return", "deepcopy", "(", "source", ")", "elif", "hasattr", "(", "source", ",", "'read'", ")", "and", "callable", "(", "source", ".", "read", ")", ":", "raw_source", "=", "source", ".", "read", "(", ")", "elif", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "expanduser", "(", "str", "(", "source", ")", ")", ")", ":", "with", "open", "(", "os", ".", "path", ".", "expanduser", "(", "str", "(", "source", ")", ")", ",", "'r'", ")", "as", "source_file", ":", "raw_source", "=", "source_file", ".", "read", "(", ")", "elif", "isinstance", "(", "source", ",", "six", ".", "string_types", ")", ":", "parts", "=", "urlparse", ".", "urlparse", "(", "source", ")", "if", "parts", ".", "scheme", "and", "parts", ".", "netloc", ":", "response", "=", "requests", ".", "get", "(", "source", ")", "if", "isinstance", "(", "response", ".", "content", ",", "six", ".", "binary_type", ")", ":", "raw_source", "=", "six", ".", "text_type", "(", "response", ".", "content", ",", "encoding", "=", "'utf-8'", ")", "else", ":", "raw_source", "=", "response", ".", "content", "else", ":", "raw_source", "=", "source", "try", ":", "try", ":", "return", "json", ".", "loads", "(", "raw_source", ")", "except", "ValueError", ":", "pass", "try", ":", "return", "yaml", ".", "safe_load", "(", "raw_source", ")", "except", "(", "yaml", ".", "scanner", ".", "ScannerError", ",", "yaml", ".", "parser", ".", "ParserError", ")", ":", "pass", "except", "NameError", ":", "pass", "raise", "ValueError", "(", "\"Unable to parse `{0}`. Tried yaml and json.\"", ".", "format", "(", "source", ")", ",", ")" ]
Common entry point for loading some form of raw swagger schema. Supports: - python object (dictionary-like) - path to yaml file - path to json file - file object (json or yaml). - json string. - yaml string.
[ "Common", "entry", "point", "for", "loading", "some", "form", "of", "raw", "swagger", "schema", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/core.py#L33-L78
train
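load_source's content negotiation can be boiled down to a few branches. The sketch below keeps only the dict, file-object, filesystem-path and raw-string cases and always parses JSON; URL fetching and the YAML fallback of the real function are left out, so treat it as an illustration rather than a drop-in replacement.

import collections.abc
import copy
import json
import os

def load_source_sketch(source):
    if isinstance(source, collections.abc.Mapping):
        return copy.deepcopy(source)          # already a schema-like object
    if hasattr(source, "read") and callable(source.read):
        raw = source.read()                   # file-like object
    elif os.path.exists(os.path.expanduser(str(source))):
        with open(os.path.expanduser(str(source))) as handle:
            raw = handle.read()               # path on disk
    else:
        raw = source                          # assume a raw JSON string
    return json.loads(raw)

print(load_source_sketch('{"swagger": "2.0"}'))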
pipermerriam/flex
flex/core.py
validate
def validate(raw_schema, target=None, **kwargs): """ Given the python representation of a JSONschema as defined in the swagger spec, validate that the schema complies to spec. If `target` is provided, that target will be validated against the provided schema. """ schema = schema_validator(raw_schema, **kwargs) if target is not None: validate_object(target, schema=schema, **kwargs)
python
def validate(raw_schema, target=None, **kwargs): """ Given the python representation of a JSONschema as defined in the swagger spec, validate that the schema complies to spec. If `target` is provided, that target will be validated against the provided schema. """ schema = schema_validator(raw_schema, **kwargs) if target is not None: validate_object(target, schema=schema, **kwargs)
[ "def", "validate", "(", "raw_schema", ",", "target", "=", "None", ",", "*", "*", "kwargs", ")", ":", "schema", "=", "schema_validator", "(", "raw_schema", ",", "*", "*", "kwargs", ")", "if", "target", "is", "not", "None", ":", "validate_object", "(", "target", ",", "schema", "=", "schema", ",", "*", "*", "kwargs", ")" ]
Given the python representation of a JSONschema as defined in the swagger spec, validate that the schema complies with the spec. If `target` is provided, that target will be validated against the provided schema.
[ "Given", "the", "python", "representation", "of", "a", "JSONschema", "as", "defined", "in", "the", "swagger", "spec", "validate", "that", "the", "schema", "complies", "to", "spec", ".", "If", "target", "is", "provided", "that", "target", "will", "be", "validated", "against", "the", "provided", "schema", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/core.py#L103-L111
train
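A quick usage sketch for validate(): assuming the flex package is importable, the minimal Swagger 2.0 document below is believed to pass schema validation; anything malformed would raise a ValidationError instead.

from flex.core import validate

minimal_schema = {
    "swagger": "2.0",
    "info": {"title": "Example API", "version": "1.0.0"},
    "paths": {},
}

# Raises a ValidationError if the schema (or, when given, the target) is invalid.
validate(minimal_schema)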
pipermerriam/flex
flex/core.py
validate_api_response
def validate_api_response(schema, raw_response, request_method='get', raw_request=None): """ Validate the response of an api call against a swagger schema. """ request = None if raw_request is not None: request = normalize_request(raw_request) response = None if raw_response is not None: response = normalize_response(raw_response, request=request) if response is not None: validate_response( response=response, request_method=request_method, schema=schema )
python
def validate_api_response(schema, raw_response, request_method='get', raw_request=None): """ Validate the response of an api call against a swagger schema. """ request = None if raw_request is not None: request = normalize_request(raw_request) response = None if raw_response is not None: response = normalize_response(raw_response, request=request) if response is not None: validate_response( response=response, request_method=request_method, schema=schema )
[ "def", "validate_api_response", "(", "schema", ",", "raw_response", ",", "request_method", "=", "'get'", ",", "raw_request", "=", "None", ")", ":", "request", "=", "None", "if", "raw_request", "is", "not", "None", ":", "request", "=", "normalize_request", "(", "raw_request", ")", "response", "=", "None", "if", "raw_response", "is", "not", "None", ":", "response", "=", "normalize_response", "(", "raw_response", ",", "request", "=", "request", ")", "if", "response", "is", "not", "None", ":", "validate_response", "(", "response", "=", "response", ",", "request_method", "=", "request_method", ",", "schema", "=", "schema", ")" ]
Validate the response of an api call against a swagger schema.
[ "Validate", "the", "response", "of", "an", "api", "call", "against", "a", "swagger", "schema", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/core.py#L121-L138
train
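Typical use of validate_api_response() is against a live requests response. The snippet below is a hedged sketch: the schema file name and API URL are placeholders, and it assumes flex's load() can read the referenced file; it is not a self-contained test.

import requests
from flex.core import load, validate_api_response

schema = load("swagger.yaml")                            # hypothetical schema file
response = requests.get("https://api.example.com/v1/pets")  # placeholder endpoint
validate_api_response(schema, raw_response=response, request_method="get")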
pipermerriam/flex
flex/parameters.py
find_parameter
def find_parameter(parameters, **kwargs): """ Given a list of parameters, find the one with the given name. """ matching_parameters = filter_parameters(parameters, **kwargs) if len(matching_parameters) == 1: return matching_parameters[0] elif len(matching_parameters) > 1: raise MultipleParametersFound() raise NoParameterFound()
python
def find_parameter(parameters, **kwargs): """ Given a list of parameters, find the one with the given name. """ matching_parameters = filter_parameters(parameters, **kwargs) if len(matching_parameters) == 1: return matching_parameters[0] elif len(matching_parameters) > 1: raise MultipleParametersFound() raise NoParameterFound()
[ "def", "find_parameter", "(", "parameters", ",", "*", "*", "kwargs", ")", ":", "matching_parameters", "=", "filter_parameters", "(", "parameters", ",", "*", "*", "kwargs", ")", "if", "len", "(", "matching_parameters", ")", "==", "1", ":", "return", "matching_parameters", "[", "0", "]", "elif", "len", "(", "matching_parameters", ")", ">", "1", ":", "raise", "MultipleParametersFound", "(", ")", "raise", "NoParameterFound", "(", ")" ]
Given a list of parameters, find the one with the given name.
[ "Given", "a", "list", "of", "parameters", "find", "the", "one", "with", "the", "given", "name", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/parameters.py#L25-L34
train
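find_parameter builds on filter_parameters, which is not shown in this excerpt; the self-contained sketch below approximates its keyword matching (with `in` spelled `in_`, since `in` is a Python keyword) so the single-match lookup is visible.

parameters = [
    {"name": "petId", "in": "path", "type": "integer"},
    {"name": "limit", "in": "query", "type": "integer"},
]

def filter_parameters_sketch(parameters, **kwargs):
    # `in_` maps back to the swagger field name "in".
    lookups = {("in" if key == "in_" else key): value for key, value in kwargs.items()}
    return [p for p in parameters
            if all(p.get(field) == value for field, value in lookups.items())]

matches = filter_parameters_sketch(parameters, name="petId", in_="path")
assert len(matches) == 1 and matches[0]["type"] == "integer"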
pipermerriam/flex
flex/parameters.py
merge_parameter_lists
def merge_parameter_lists(*parameter_definitions): """ Merge multiple lists of parameters into a single list. If there are any duplicate definitions, the last write wins. """ merged_parameters = {} for parameter_list in parameter_definitions: for parameter in parameter_list: key = (parameter['name'], parameter['in']) merged_parameters[key] = parameter return merged_parameters.values()
python
def merge_parameter_lists(*parameter_definitions): """ Merge multiple lists of parameters into a single list. If there are any duplicate definitions, the last write wins. """ merged_parameters = {} for parameter_list in parameter_definitions: for parameter in parameter_list: key = (parameter['name'], parameter['in']) merged_parameters[key] = parameter return merged_parameters.values()
[ "def", "merge_parameter_lists", "(", "*", "parameter_definitions", ")", ":", "merged_parameters", "=", "{", "}", "for", "parameter_list", "in", "parameter_definitions", ":", "for", "parameter", "in", "parameter_list", ":", "key", "=", "(", "parameter", "[", "'name'", "]", ",", "parameter", "[", "'in'", "]", ")", "merged_parameters", "[", "key", "]", "=", "parameter", "return", "merged_parameters", ".", "values", "(", ")" ]
Merge multiple lists of parameters into a single list. If there are any duplicate definitions, the last write wins.
[ "Merge", "multiple", "lists", "of", "parameters", "into", "a", "single", "list", ".", "If", "there", "are", "any", "duplicate", "definitions", "the", "last", "write", "wins", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/parameters.py#L37-L47
train
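The "last write wins" behaviour is easiest to see with two definitions of the same parameter. This is a toy illustration of the (name, in) merge key the function uses, not a test against flex itself.

path_level = [{"name": "petId", "in": "path", "required": True}]
operation_level = [{"name": "petId", "in": "path", "required": True,
                    "description": "overrides the path-level definition"}]

merged = {}
for parameter_list in (path_level, operation_level):
    for parameter in parameter_list:
        # later lists overwrite earlier entries with the same (name, in) key
        merged[(parameter["name"], parameter["in"])] = parameter

assert list(merged.values())[0]["description"].startswith("overrides")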
pipermerriam/flex
flex/validation/response.py
validate_status_code_to_response_definition
def validate_status_code_to_response_definition(response, operation_definition): """ Given a response, validate that the response status code is in the accepted status codes defined by this endpoint. If so, return the response definition that corresponds to the status code. """ status_code = response.status_code operation_responses = {str(code): val for code, val in operation_definition['responses'].items()} key = status_code if key not in operation_responses: key = 'default' try: response_definition = operation_responses[key] except KeyError: raise ValidationError( MESSAGES['response']['invalid_status_code'].format( status_code, ', '.join(operation_responses.keys()), ), ) return response_definition
python
def validate_status_code_to_response_definition(response, operation_definition): """ Given a response, validate that the response status code is in the accepted status codes defined by this endpoint. If so, return the response definition that corresponds to the status code. """ status_code = response.status_code operation_responses = {str(code): val for code, val in operation_definition['responses'].items()} key = status_code if key not in operation_responses: key = 'default' try: response_definition = operation_responses[key] except KeyError: raise ValidationError( MESSAGES['response']['invalid_status_code'].format( status_code, ', '.join(operation_responses.keys()), ), ) return response_definition
[ "def", "validate_status_code_to_response_definition", "(", "response", ",", "operation_definition", ")", ":", "status_code", "=", "response", ".", "status_code", "operation_responses", "=", "{", "str", "(", "code", ")", ":", "val", "for", "code", ",", "val", "in", "operation_definition", "[", "'responses'", "]", ".", "items", "(", ")", "}", "key", "=", "status_code", "if", "key", "not", "in", "operation_responses", ":", "key", "=", "'default'", "try", ":", "response_definition", "=", "operation_responses", "[", "key", "]", "except", "KeyError", ":", "raise", "ValidationError", "(", "MESSAGES", "[", "'response'", "]", "[", "'invalid_status_code'", "]", ".", "format", "(", "status_code", ",", "', '", ".", "join", "(", "operation_responses", ".", "keys", "(", ")", ")", ",", ")", ",", ")", "return", "response_definition" ]
Given a response, validate that the response status code is in the accepted status codes defined by this endpoint. If so, return the response definition that corresponds to the status code.
[ "Given", "a", "response", "validate", "that", "the", "response", "status", "code", "is", "in", "the", "accepted", "status", "codes", "defined", "by", "this", "endpoint", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/response.py#L37-L60
train
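The lookup above is just "exact status code, else default, else error". A small stand-alone sketch with a made-up responses mapping:

operation_responses = {"200": {"description": "OK"},
                       "default": {"description": "unexpected error"}}

def pick_response_definition(status_code):
    key = str(status_code)
    if key not in operation_responses:
        key = "default"          # fall back to the catch-all definition
    return operation_responses[key]

assert pick_response_definition(200)["description"] == "OK"
assert pick_response_definition(503)["description"] == "unexpected error"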
pipermerriam/flex
flex/validation/response.py
generate_path_validator
def generate_path_validator(api_path, path_definition, parameters, context, **kwargs): """ Generates a callable for validating the parameters in a response object. """ path_level_parameters = dereference_parameter_list( path_definition.get('parameters', []), context, ) operation_level_parameters = dereference_parameter_list( parameters, context, ) all_parameters = merge_parameter_lists( path_level_parameters, operation_level_parameters, ) # PATH in_path_parameters = filter_parameters(all_parameters, in_=PATH) return chain_reduce_partial( attrgetter('path'), generate_path_parameters_validator(api_path, in_path_parameters, context), )
python
def generate_path_validator(api_path, path_definition, parameters, context, **kwargs): """ Generates a callable for validating the parameters in a response object. """ path_level_parameters = dereference_parameter_list( path_definition.get('parameters', []), context, ) operation_level_parameters = dereference_parameter_list( parameters, context, ) all_parameters = merge_parameter_lists( path_level_parameters, operation_level_parameters, ) # PATH in_path_parameters = filter_parameters(all_parameters, in_=PATH) return chain_reduce_partial( attrgetter('path'), generate_path_parameters_validator(api_path, in_path_parameters, context), )
[ "def", "generate_path_validator", "(", "api_path", ",", "path_definition", ",", "parameters", ",", "context", ",", "*", "*", "kwargs", ")", ":", "path_level_parameters", "=", "dereference_parameter_list", "(", "path_definition", ".", "get", "(", "'parameters'", ",", "[", "]", ")", ",", "context", ",", ")", "operation_level_parameters", "=", "dereference_parameter_list", "(", "parameters", ",", "context", ",", ")", "all_parameters", "=", "merge_parameter_lists", "(", "path_level_parameters", ",", "operation_level_parameters", ",", ")", "# PATH", "in_path_parameters", "=", "filter_parameters", "(", "all_parameters", ",", "in_", "=", "PATH", ")", "return", "chain_reduce_partial", "(", "attrgetter", "(", "'path'", ")", ",", "generate_path_parameters_validator", "(", "api_path", ",", "in_path_parameters", ",", "context", ")", ",", ")" ]
Generates a callable for validating the parameters in a response object.
[ "Generates", "a", "callable", "for", "validating", "the", "parameters", "in", "a", "response", "object", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/response.py#L115-L139
train
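chain_reduce_partial (defined elsewhere in flex) threads a value through a sequence of callables, so the validator built above first extracts response.path and then validates it. The sketch below approximates that composition with a tiny helper and a namedtuple standing in for the response object.

from operator import attrgetter
from collections import namedtuple

def chain(*funcs):
    """Feed each function's result into the next one."""
    def runner(value):
        for func in funcs:
            value = func(value)
        return value
    return runner

Response = namedtuple("Response", ["path"])
validator = chain(attrgetter("path"), lambda path: path.startswith("/"))
assert validator(Response(path="/v1/pets")) is True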
pipermerriam/flex
flex/validation/response.py
validate_response
def validate_response(response, request_method, schema): """ Response validation involves the following steps. 4. validate that the response status_code is in the allowed responses for the request method. 5. validate that the response content validates against any provided schemas for the responses. 6. headers, content-types, etc..., ??? """ with ErrorDict() as errors: # 1 # TODO: tests try: api_path = validate_path_to_api_path( path=response.path, context=schema, **schema ) except ValidationError as err: errors['path'].extend(list(err.messages)) return # this causes an exception to be raised since errors is no longer falsy. path_definition = schema['paths'][api_path] or {} # TODO: tests try: operation_definition = validate_request_method_to_operation( request_method=request_method, path_definition=path_definition, ) except ValidationError as err: errors['method'].add_error(err.detail) return # 4 try: response_definition = validate_status_code_to_response_definition( response=response, operation_definition=operation_definition, ) except ValidationError as err: errors['status_code'].add_error(err.detail) else: # 5 response_validator = generate_response_validator( api_path, operation_definition=operation_definition, path_definition=path_definition, response_definition=response_definition, context=schema, ) try: response_validator(response, context=schema) except ValidationError as err: errors['body'].add_error(err.detail)
python
def validate_response(response, request_method, schema): """ Response validation involves the following steps. 4. validate that the response status_code is in the allowed responses for the request method. 5. validate that the response content validates against any provided schemas for the responses. 6. headers, content-types, etc..., ??? """ with ErrorDict() as errors: # 1 # TODO: tests try: api_path = validate_path_to_api_path( path=response.path, context=schema, **schema ) except ValidationError as err: errors['path'].extend(list(err.messages)) return # this causes an exception to be raised since errors is no longer falsy. path_definition = schema['paths'][api_path] or {} # TODO: tests try: operation_definition = validate_request_method_to_operation( request_method=request_method, path_definition=path_definition, ) except ValidationError as err: errors['method'].add_error(err.detail) return # 4 try: response_definition = validate_status_code_to_response_definition( response=response, operation_definition=operation_definition, ) except ValidationError as err: errors['status_code'].add_error(err.detail) else: # 5 response_validator = generate_response_validator( api_path, operation_definition=operation_definition, path_definition=path_definition, response_definition=response_definition, context=schema, ) try: response_validator(response, context=schema) except ValidationError as err: errors['body'].add_error(err.detail)
[ "def", "validate_response", "(", "response", ",", "request_method", ",", "schema", ")", ":", "with", "ErrorDict", "(", ")", "as", "errors", ":", "# 1", "# TODO: tests", "try", ":", "api_path", "=", "validate_path_to_api_path", "(", "path", "=", "response", ".", "path", ",", "context", "=", "schema", ",", "*", "*", "schema", ")", "except", "ValidationError", "as", "err", ":", "errors", "[", "'path'", "]", ".", "extend", "(", "list", "(", "err", ".", "messages", ")", ")", "return", "# this causes an exception to be raised since errors is no longer falsy.", "path_definition", "=", "schema", "[", "'paths'", "]", "[", "api_path", "]", "or", "{", "}", "# TODO: tests", "try", ":", "operation_definition", "=", "validate_request_method_to_operation", "(", "request_method", "=", "request_method", ",", "path_definition", "=", "path_definition", ",", ")", "except", "ValidationError", "as", "err", ":", "errors", "[", "'method'", "]", ".", "add_error", "(", "err", ".", "detail", ")", "return", "# 4", "try", ":", "response_definition", "=", "validate_status_code_to_response_definition", "(", "response", "=", "response", ",", "operation_definition", "=", "operation_definition", ",", ")", "except", "ValidationError", "as", "err", ":", "errors", "[", "'status_code'", "]", ".", "add_error", "(", "err", ".", "detail", ")", "else", ":", "# 5", "response_validator", "=", "generate_response_validator", "(", "api_path", ",", "operation_definition", "=", "operation_definition", ",", "path_definition", "=", "path_definition", ",", "response_definition", "=", "response_definition", ",", "context", "=", "schema", ",", ")", "try", ":", "response_validator", "(", "response", ",", "context", "=", "schema", ")", "except", "ValidationError", "as", "err", ":", "errors", "[", "'body'", "]", ".", "add_error", "(", "err", ".", "detail", ")" ]
Response validation involves the following steps. 4. validate that the response status_code is in the allowed responses for the request method. 5. validate that the response content validates against any provided schemas for the responses. 6. headers, content-types, etc..., ???
[ "Response", "validation", "involves", "the", "following", "steps", ".", "4", ".", "validate", "that", "the", "response", "status_code", "is", "in", "the", "allowed", "responses", "for", "the", "request", "method", ".", "5", ".", "validate", "that", "the", "response", "content", "validates", "against", "any", "provided", "schemas", "for", "the", "responses", ".", "6", ".", "headers", "content", "-", "types", "etc", "...", "???" ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/response.py#L194-L248
train
pipermerriam/flex
flex/validation/schema.py
construct_schema_validators
def construct_schema_validators(schema, context): """ Given a schema object, construct a dictionary of validators needed to validate a response matching the given schema. Special Cases: - $ref: These validators need to be Lazily evaluating so that circular validation dependencies do not result in an infinitely deep validation chain. - properties: These validators are meant to apply to properties of the object being validated rather than the object itself. In this case, we need recurse back into this function to generate a dictionary of validators for the property. """ validators = ValidationDict() if '$ref' in schema: validators.add_validator( '$ref', SchemaReferenceValidator(schema['$ref'], context), ) if 'properties' in schema: for property_, property_schema in schema['properties'].items(): property_validator = generate_object_validator( schema=property_schema, context=context, ) validators.add_property_validator(property_, property_validator) if schema.get('additionalProperties') is False: validators.add_validator( 'additionalProperties', generate_additional_properties_validator(context=context, **schema), ) assert 'context' not in schema for key in schema: if key in validator_mapping: validators.add_validator(key, validator_mapping[key](context=context, **schema)) return validators
python
def construct_schema_validators(schema, context): """ Given a schema object, construct a dictionary of validators needed to validate a response matching the given schema. Special Cases: - $ref: These validators need to be Lazily evaluating so that circular validation dependencies do not result in an infinitely deep validation chain. - properties: These validators are meant to apply to properties of the object being validated rather than the object itself. In this case, we need recurse back into this function to generate a dictionary of validators for the property. """ validators = ValidationDict() if '$ref' in schema: validators.add_validator( '$ref', SchemaReferenceValidator(schema['$ref'], context), ) if 'properties' in schema: for property_, property_schema in schema['properties'].items(): property_validator = generate_object_validator( schema=property_schema, context=context, ) validators.add_property_validator(property_, property_validator) if schema.get('additionalProperties') is False: validators.add_validator( 'additionalProperties', generate_additional_properties_validator(context=context, **schema), ) assert 'context' not in schema for key in schema: if key in validator_mapping: validators.add_validator(key, validator_mapping[key](context=context, **schema)) return validators
[ "def", "construct_schema_validators", "(", "schema", ",", "context", ")", ":", "validators", "=", "ValidationDict", "(", ")", "if", "'$ref'", "in", "schema", ":", "validators", ".", "add_validator", "(", "'$ref'", ",", "SchemaReferenceValidator", "(", "schema", "[", "'$ref'", "]", ",", "context", ")", ",", ")", "if", "'properties'", "in", "schema", ":", "for", "property_", ",", "property_schema", "in", "schema", "[", "'properties'", "]", ".", "items", "(", ")", ":", "property_validator", "=", "generate_object_validator", "(", "schema", "=", "property_schema", ",", "context", "=", "context", ",", ")", "validators", ".", "add_property_validator", "(", "property_", ",", "property_validator", ")", "if", "schema", ".", "get", "(", "'additionalProperties'", ")", "is", "False", ":", "validators", ".", "add_validator", "(", "'additionalProperties'", ",", "generate_additional_properties_validator", "(", "context", "=", "context", ",", "*", "*", "schema", ")", ",", ")", "assert", "'context'", "not", "in", "schema", "for", "key", "in", "schema", ":", "if", "key", "in", "validator_mapping", ":", "validators", ".", "add_validator", "(", "key", ",", "validator_mapping", "[", "key", "]", "(", "context", "=", "context", ",", "*", "*", "schema", ")", ")", "return", "validators" ]
Given a schema object, construct a dictionary of validators needed to validate a response matching the given schema. Special Cases: - $ref: These validators need to be lazily evaluated so that circular validation dependencies do not result in an infinitely deep validation chain. - properties: These validators are meant to apply to properties of the object being validated rather than the object itself. In this case, we need to recurse back into this function to generate a dictionary of validators for the property.
[ "Given", "a", "schema", "object", "construct", "a", "dictionary", "of", "validators", "needed", "to", "validate", "a", "response", "matching", "the", "given", "schema", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/schema.py#L199-L236
train
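The core move in construct_schema_validators is dispatching each schema key to a validator factory. The toy validator_mapping below is an assumption made for illustration; flex's real mapping covers far more keywords and returns richer validator objects.

# Each factory receives the whole schema as keyword arguments and returns a
# callable that checks one constraint.
validator_mapping = {
    "minimum": lambda minimum, **schema: lambda value: value >= minimum,
    "maximum": lambda maximum, **schema: lambda value: value <= maximum,
}

def construct_validators_sketch(schema):
    return {key: factory(**schema)
            for key, factory in validator_mapping.items() if key in schema}

validators = construct_validators_sketch({"type": "integer", "minimum": 1, "maximum": 10})
assert all(check(5) for check in validators.values())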
pipermerriam/flex
flex/validation/common.py
validate_type
def validate_type(value, types, **kwargs): """ Validate that the value is one of the provided primative types. """ if not is_value_of_any_type(value, types): raise ValidationError(MESSAGES['type']['invalid'].format( repr(value), get_type_for_value(value), types, ))
python
def validate_type(value, types, **kwargs): """ Validate that the value is one of the provided primative types. """ if not is_value_of_any_type(value, types): raise ValidationError(MESSAGES['type']['invalid'].format( repr(value), get_type_for_value(value), types, ))
[ "def", "validate_type", "(", "value", ",", "types", ",", "*", "*", "kwargs", ")", ":", "if", "not", "is_value_of_any_type", "(", "value", ",", "types", ")", ":", "raise", "ValidationError", "(", "MESSAGES", "[", "'type'", "]", "[", "'invalid'", "]", ".", "format", "(", "repr", "(", "value", ")", ",", "get_type_for_value", "(", "value", ")", ",", "types", ",", ")", ")" ]
Validate that the value is one of the provided primitive types.
[ "Validate", "that", "the", "value", "is", "one", "of", "the", "provided", "primative", "types", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L56-L63
train
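A minimal stand-alone illustration of primitive type checking in the spirit of validate_type; the type table is a simplified assumption (the real implementation also distinguishes booleans from integers and knows about a null type).

PRIMITIVES = {"string": str, "integer": int, "number": (int, float),
              "boolean": bool, "array": list, "object": dict}

def check_type(value, types):
    """Raise if `value` matches none of the named primitive types."""
    if not any(isinstance(value, PRIMITIVES[t]) for t in types):
        raise ValueError("{0!r} is not any of the types {1}".format(value, types))

check_type("abc", ["string"])        # passes silently
check_type(3, ["number", "string"])  # passes silently
try:
    check_type(3, ["string"])
except ValueError as err:
    print(err)                       # 3 is not any of the types ['string']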
pipermerriam/flex
flex/validation/common.py
generate_type_validator
def generate_type_validator(type_, **kwargs): """ Generates a callable validator for the given type or iterable of types. """ if is_non_string_iterable(type_): types = tuple(type_) else: types = (type_,) # support x-nullable since Swagger 2.0 doesn't support null type # (see https://github.com/OAI/OpenAPI-Specification/issues/229) if kwargs.get('x-nullable', False) and NULL not in types: types = types + (NULL,) return functools.partial(validate_type, types=types)
python
def generate_type_validator(type_, **kwargs): """ Generates a callable validator for the given type or iterable of types. """ if is_non_string_iterable(type_): types = tuple(type_) else: types = (type_,) # support x-nullable since Swagger 2.0 doesn't support null type # (see https://github.com/OAI/OpenAPI-Specification/issues/229) if kwargs.get('x-nullable', False) and NULL not in types: types = types + (NULL,) return functools.partial(validate_type, types=types)
[ "def", "generate_type_validator", "(", "type_", ",", "*", "*", "kwargs", ")", ":", "if", "is_non_string_iterable", "(", "type_", ")", ":", "types", "=", "tuple", "(", "type_", ")", "else", ":", "types", "=", "(", "type_", ",", ")", "# support x-nullable since Swagger 2.0 doesn't support null type", "# (see https://github.com/OAI/OpenAPI-Specification/issues/229)", "if", "kwargs", ".", "get", "(", "'x-nullable'", ",", "False", ")", "and", "NULL", "not", "in", "types", ":", "types", "=", "types", "+", "(", "NULL", ",", ")", "return", "functools", ".", "partial", "(", "validate_type", ",", "types", "=", "types", ")" ]
Generates a callable validator for the given type or iterable of types.
[ "Generates", "a", "callable", "validator", "for", "the", "given", "type", "or", "iterable", "of", "types", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L67-L79
train
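generate_type_validator shows the library's general pattern of baking schema keywords into a reusable callable with `functools.partial`, widening the accepted types when `x-nullable` is set. A hedged sketch of that closure pattern, using a simplified stand-in validator:

```python
import functools


def check_instance(value, types, **kwargs):
    # Simplified stand-in: a "type" here is just a Python class.
    if not isinstance(value, types):
        raise ValueError("{0!r} is not an instance of {1}".format(value, types))


def generate_type_validator(type_, **kwargs):
    """Build a callable that closes over the allowed types."""
    types = tuple(type_) if isinstance(type_, (list, tuple)) else (type_,)
    # Mirror the x-nullable extension: also accept None when the schema asks for it.
    if kwargs.get('x-nullable', False) and type(None) not in types:
        types = types + (type(None),)
    return functools.partial(check_instance, types=types)


nullable_int = generate_type_validator(int, **{'x-nullable': True})
nullable_int(3)      # ok
nullable_int(None)   # ok, only because of x-nullable
```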
pipermerriam/flex
flex/validation/common.py
validate_multiple_of
def validate_multiple_of(value, divisor, **kwargs): """ Given a value and a divisor, validate that the value is divisible by the divisor. """ if not decimal.Decimal(str(value)) % decimal.Decimal(str(divisor)) == 0: raise ValidationError( MESSAGES['multiple_of']['invalid'].format(divisor, value), )
python
def validate_multiple_of(value, divisor, **kwargs): """ Given a value and a divisor, validate that the value is divisible by the divisor. """ if not decimal.Decimal(str(value)) % decimal.Decimal(str(divisor)) == 0: raise ValidationError( MESSAGES['multiple_of']['invalid'].format(divisor, value), )
[ "def", "validate_multiple_of", "(", "value", ",", "divisor", ",", "*", "*", "kwargs", ")", ":", "if", "not", "decimal", ".", "Decimal", "(", "str", "(", "value", ")", ")", "%", "decimal", ".", "Decimal", "(", "str", "(", "divisor", ")", ")", "==", "0", ":", "raise", "ValidationError", "(", "MESSAGES", "[", "'multiple_of'", "]", "[", "'invalid'", "]", ".", "format", "(", "divisor", ",", "value", ")", ",", ")" ]
Given a value and a divisor, validate that the value is divisible by the divisor.
[ "Given", "a", "value", "and", "a", "divisor", "validate", "that", "the", "value", "is", "divisible", "by", "the", "divisor", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L103-L111
train
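The `decimal.Decimal(str(...))` round trip in validate_multiple_of is what keeps binary floating-point noise out of the modulo check. A short illustration of why that matters:

```python
import decimal

# Naive float arithmetic is noisy: 0.3 % 0.1 is not exactly zero.
print(0.3 % 0.1)    # ~0.09999999999999998

# Routing both operands through Decimal(str(...)), as the validator does,
# compares the numbers as written, so the remainder comes out exact.
print(decimal.Decimal(str(0.3)) % decimal.Decimal(str(0.1)))   # 0.0


def is_multiple_of(value, divisor):
    """Sketch of the multipleOf check."""
    return decimal.Decimal(str(value)) % decimal.Decimal(str(divisor)) == 0


print(is_multiple_of(0.3, 0.1))   # True
print(is_multiple_of(10, 4))      # False
```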
pipermerriam/flex
flex/validation/common.py
validate_minimum
def validate_minimum(value, minimum, is_exclusive, **kwargs): """ Validator function for validating that a value does not violate it's minimum allowed value. This validation can be inclusive, or exclusive of the minimum depending on the value of `is_exclusive`. """ if is_exclusive: comparison_text = "greater than" compare_fn = operator.gt else: comparison_text = "greater than or equal to" compare_fn = operator.ge if not compare_fn(value, minimum): raise ValidationError( MESSAGES['minimum']['invalid'].format(value, comparison_text, minimum), )
python
def validate_minimum(value, minimum, is_exclusive, **kwargs): """ Validator function for validating that a value does not violate it's minimum allowed value. This validation can be inclusive, or exclusive of the minimum depending on the value of `is_exclusive`. """ if is_exclusive: comparison_text = "greater than" compare_fn = operator.gt else: comparison_text = "greater than or equal to" compare_fn = operator.ge if not compare_fn(value, minimum): raise ValidationError( MESSAGES['minimum']['invalid'].format(value, comparison_text, minimum), )
[ "def", "validate_minimum", "(", "value", ",", "minimum", ",", "is_exclusive", ",", "*", "*", "kwargs", ")", ":", "if", "is_exclusive", ":", "comparison_text", "=", "\"greater than\"", "compare_fn", "=", "operator", ".", "gt", "else", ":", "comparison_text", "=", "\"greater than or equal to\"", "compare_fn", "=", "operator", ".", "ge", "if", "not", "compare_fn", "(", "value", ",", "minimum", ")", ":", "raise", "ValidationError", "(", "MESSAGES", "[", "'minimum'", "]", "[", "'invalid'", "]", ".", "format", "(", "value", ",", "comparison_text", ",", "minimum", ")", ",", ")" ]
Validator function for validating that a value does not violate its minimum allowed value. This validation can be inclusive or exclusive of the minimum depending on the value of `is_exclusive`.
[ "Validator", "function", "for", "validating", "that", "a", "value", "does", "not", "violate", "it", "s", "minimum", "allowed", "value", ".", "This", "validation", "can", "be", "inclusive", "or", "exclusive", "of", "the", "minimum", "depending", "on", "the", "value", "of", "is_exclusive", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L120-L136
train
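The inclusive/exclusive switch in validate_minimum boils down to choosing between `operator.ge` and `operator.gt` up front. A compact sketch of that pattern (the message text here is made up):

```python
import operator


def check_minimum(value, minimum, is_exclusive=False):
    """Sketch of the minimum/exclusiveMinimum check: pick the comparison
    function once, then apply it."""
    compare_fn = operator.gt if is_exclusive else operator.ge
    if not compare_fn(value, minimum):
        kind = "greater than" if is_exclusive else "greater than or equal to"
        raise ValueError("{0} must be {1} {2}".format(value, kind, minimum))


check_minimum(5, minimum=5)                         # ok: inclusive by default
try:
    check_minimum(5, minimum=5, is_exclusive=True)  # 5 > 5 is False
except ValueError as err:
    print(err)
```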
pipermerriam/flex
flex/validation/common.py
generate_minimum_validator
def generate_minimum_validator(minimum, exclusiveMinimum=False, **kwargs): """ Generator function returning a callable for minimum value validation. """ return functools.partial(validate_minimum, minimum=minimum, is_exclusive=exclusiveMinimum)
python
def generate_minimum_validator(minimum, exclusiveMinimum=False, **kwargs): """ Generator function returning a callable for minimum value validation. """ return functools.partial(validate_minimum, minimum=minimum, is_exclusive=exclusiveMinimum)
[ "def", "generate_minimum_validator", "(", "minimum", ",", "exclusiveMinimum", "=", "False", ",", "*", "*", "kwargs", ")", ":", "return", "functools", ".", "partial", "(", "validate_minimum", ",", "minimum", "=", "minimum", ",", "is_exclusive", "=", "exclusiveMinimum", ")" ]
Generator function returning a callable for minimum value validation.
[ "Generator", "function", "returning", "a", "callable", "for", "minimum", "value", "validation", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L139-L143
train
pipermerriam/flex
flex/validation/common.py
validate_maximum
def validate_maximum(value, maximum, is_exclusive, **kwargs): """ Validator function for validating that a value does not violate it's maximum allowed value. This validation can be inclusive, or exclusive of the maximum depending on the value of `is_exclusive`. """ if is_exclusive: comparison_text = "less than" compare_fn = operator.lt else: comparison_text = "less than or equal to" compare_fn = operator.le if not compare_fn(value, maximum): raise ValidationError( MESSAGES['maximum']['invalid'].format(value, comparison_text, maximum), )
python
def validate_maximum(value, maximum, is_exclusive, **kwargs): """ Validator function for validating that a value does not violate it's maximum allowed value. This validation can be inclusive, or exclusive of the maximum depending on the value of `is_exclusive`. """ if is_exclusive: comparison_text = "less than" compare_fn = operator.lt else: comparison_text = "less than or equal to" compare_fn = operator.le if not compare_fn(value, maximum): raise ValidationError( MESSAGES['maximum']['invalid'].format(value, comparison_text, maximum), )
[ "def", "validate_maximum", "(", "value", ",", "maximum", ",", "is_exclusive", ",", "*", "*", "kwargs", ")", ":", "if", "is_exclusive", ":", "comparison_text", "=", "\"less than\"", "compare_fn", "=", "operator", ".", "lt", "else", ":", "comparison_text", "=", "\"less than or equal to\"", "compare_fn", "=", "operator", ".", "le", "if", "not", "compare_fn", "(", "value", ",", "maximum", ")", ":", "raise", "ValidationError", "(", "MESSAGES", "[", "'maximum'", "]", "[", "'invalid'", "]", ".", "format", "(", "value", ",", "comparison_text", ",", "maximum", ")", ",", ")" ]
Validator function for validating that a value does not violate its maximum allowed value. This validation can be inclusive or exclusive of the maximum depending on the value of `is_exclusive`.
[ "Validator", "function", "for", "validating", "that", "a", "value", "does", "not", "violate", "it", "s", "maximum", "allowed", "value", ".", "This", "validation", "can", "be", "inclusive", "or", "exclusive", "of", "the", "maximum", "depending", "on", "the", "value", "of", "is_exclusive", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L148-L164
train
pipermerriam/flex
flex/validation/common.py
generate_maximum_validator
def generate_maximum_validator(maximum, exclusiveMaximum=False, **kwargs): """ Generator function returning a callable for maximum value validation. """ return functools.partial(validate_maximum, maximum=maximum, is_exclusive=exclusiveMaximum)
python
def generate_maximum_validator(maximum, exclusiveMaximum=False, **kwargs): """ Generator function returning a callable for maximum value validation. """ return functools.partial(validate_maximum, maximum=maximum, is_exclusive=exclusiveMaximum)
[ "def", "generate_maximum_validator", "(", "maximum", ",", "exclusiveMaximum", "=", "False", ",", "*", "*", "kwargs", ")", ":", "return", "functools", ".", "partial", "(", "validate_maximum", ",", "maximum", "=", "maximum", ",", "is_exclusive", "=", "exclusiveMaximum", ")" ]
Generator function returning a callable for maximum value validation.
[ "Generator", "function", "returning", "a", "callable", "for", "maximum", "value", "validation", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L167-L171
train
pipermerriam/flex
flex/validation/common.py
validate_min_items
def validate_min_items(value, minimum, **kwargs): """ Validator for ARRAY types to enforce a minimum number of items allowed for the ARRAY to be valid. """ if len(value) < minimum: raise ValidationError( MESSAGES['min_items']['invalid'].format( minimum, len(value), ), )
python
def validate_min_items(value, minimum, **kwargs): """ Validator for ARRAY types to enforce a minimum number of items allowed for the ARRAY to be valid. """ if len(value) < minimum: raise ValidationError( MESSAGES['min_items']['invalid'].format( minimum, len(value), ), )
[ "def", "validate_min_items", "(", "value", ",", "minimum", ",", "*", "*", "kwargs", ")", ":", "if", "len", "(", "value", ")", "<", "minimum", ":", "raise", "ValidationError", "(", "MESSAGES", "[", "'min_items'", "]", "[", "'invalid'", "]", ".", "format", "(", "minimum", ",", "len", "(", "value", ")", ",", ")", ",", ")" ]
Validator for ARRAY types to enforce a minimum number of items allowed for the ARRAY to be valid.
[ "Validator", "for", "ARRAY", "types", "to", "enforce", "a", "minimum", "number", "of", "items", "allowed", "for", "the", "ARRAY", "to", "be", "valid", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L204-L214
train
pipermerriam/flex
flex/validation/common.py
validate_max_items
def validate_max_items(value, maximum, **kwargs): """ Validator for ARRAY types to enforce a maximum number of items allowed for the ARRAY to be valid. """ if len(value) > maximum: raise ValidationError( MESSAGES['max_items']['invalid'].format( maximum, len(value), ), )
python
def validate_max_items(value, maximum, **kwargs): """ Validator for ARRAY types to enforce a maximum number of items allowed for the ARRAY to be valid. """ if len(value) > maximum: raise ValidationError( MESSAGES['max_items']['invalid'].format( maximum, len(value), ), )
[ "def", "validate_max_items", "(", "value", ",", "maximum", ",", "*", "*", "kwargs", ")", ":", "if", "len", "(", "value", ")", ">", "maximum", ":", "raise", "ValidationError", "(", "MESSAGES", "[", "'max_items'", "]", "[", "'invalid'", "]", ".", "format", "(", "maximum", ",", "len", "(", "value", ")", ",", ")", ",", ")" ]
Validator for ARRAY types to enforce a maximum number of items allowed for the ARRAY to be valid.
[ "Validator", "for", "ARRAY", "types", "to", "enforce", "a", "maximum", "number", "of", "items", "allowed", "for", "the", "ARRAY", "to", "be", "valid", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L226-L236
train
pipermerriam/flex
flex/validation/common.py
validate_unique_items
def validate_unique_items(value, **kwargs): """ Validator for ARRAY types to enforce that all array items must be unique. """ # we can't just look at the items themselves since 0 and False are treated # the same as dictionary keys, and objects aren't hashable. counter = collections.Counter(( json.dumps(v, sort_keys=True) for v in value )) dupes = [json.loads(v) for v, count in counter.items() if count > 1] if dupes: raise ValidationError( MESSAGES['unique_items']['invalid'].format( repr(dupes), ), )
python
def validate_unique_items(value, **kwargs): """ Validator for ARRAY types to enforce that all array items must be unique. """ # we can't just look at the items themselves since 0 and False are treated # the same as dictionary keys, and objects aren't hashable. counter = collections.Counter(( json.dumps(v, sort_keys=True) for v in value )) dupes = [json.loads(v) for v, count in counter.items() if count > 1] if dupes: raise ValidationError( MESSAGES['unique_items']['invalid'].format( repr(dupes), ), )
[ "def", "validate_unique_items", "(", "value", ",", "*", "*", "kwargs", ")", ":", "# we can't just look at the items themselves since 0 and False are treated", "# the same as dictionary keys, and objects aren't hashable.", "counter", "=", "collections", ".", "Counter", "(", "(", "json", ".", "dumps", "(", "v", ",", "sort_keys", "=", "True", ")", "for", "v", "in", "value", ")", ")", "dupes", "=", "[", "json", ".", "loads", "(", "v", ")", "for", "v", ",", "count", "in", "counter", ".", "items", "(", ")", "if", "count", ">", "1", "]", "if", "dupes", ":", "raise", "ValidationError", "(", "MESSAGES", "[", "'unique_items'", "]", "[", "'invalid'", "]", ".", "format", "(", "repr", "(", "dupes", ")", ",", ")", ",", ")" ]
Validator for ARRAY types to enforce that all array items must be unique.
[ "Validator", "for", "ARRAY", "types", "to", "enforce", "that", "all", "array", "items", "must", "be", "unique", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L248-L264
train
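The interesting detail in validate_unique_items is the canonicalisation step: items are compared via `json.dumps(..., sort_keys=True)` because `0` and `False` collide as hash keys and dicts are not hashable at all. A self-contained sketch:

```python
import collections
import json


def find_duplicates(items):
    """Sketch of the uniqueItems check: count canonical JSON encodings
    instead of the raw items, so unhashable values still work and 0/False
    stay distinct."""
    counter = collections.Counter(json.dumps(v, sort_keys=True) for v in items)
    return [json.loads(v) for v, count in counter.items() if count > 1]


print(find_duplicates([0, False, 1]))    # [] -- 0 and False do not collide
print(find_duplicates([{"a": 1, "b": 2}, {"b": 2, "a": 1}]))   # [{'a': 1, 'b': 2}]
```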
pipermerriam/flex
flex/validation/common.py
validate_object
def validate_object(obj, field_validators=None, non_field_validators=None, schema=None, context=None): """ Takes a mapping and applies a mapping of validator functions to it collecting and reraising any validation errors that occur. """ if schema is None: schema = {} if context is None: context = {} if field_validators is None: field_validators = ValidationDict() if non_field_validators is None: non_field_validators = ValidationList() from flex.validation.schema import ( construct_schema_validators, ) schema_validators = construct_schema_validators(schema, context) if '$ref' in schema_validators and hasattr(schema_validators['$ref'], 'validators'): ref_ = field_validators.pop('$ref') for k, v in ref_.validators.items(): if k not in schema_validators: schema_validators.add_validator(k, v) if 'discriminator' in schema: schema_validators = add_polymorphism_requirements(obj, schema, context, schema_validators) # delete resolved discriminator to avoid infinite recursion del schema['discriminator'] schema_validators.update(field_validators) schema_validators.validate_object(obj, context=context) non_field_validators.validate_object(obj, context=context) return obj
python
def validate_object(obj, field_validators=None, non_field_validators=None, schema=None, context=None): """ Takes a mapping and applies a mapping of validator functions to it collecting and reraising any validation errors that occur. """ if schema is None: schema = {} if context is None: context = {} if field_validators is None: field_validators = ValidationDict() if non_field_validators is None: non_field_validators = ValidationList() from flex.validation.schema import ( construct_schema_validators, ) schema_validators = construct_schema_validators(schema, context) if '$ref' in schema_validators and hasattr(schema_validators['$ref'], 'validators'): ref_ = field_validators.pop('$ref') for k, v in ref_.validators.items(): if k not in schema_validators: schema_validators.add_validator(k, v) if 'discriminator' in schema: schema_validators = add_polymorphism_requirements(obj, schema, context, schema_validators) # delete resolved discriminator to avoid infinite recursion del schema['discriminator'] schema_validators.update(field_validators) schema_validators.validate_object(obj, context=context) non_field_validators.validate_object(obj, context=context) return obj
[ "def", "validate_object", "(", "obj", ",", "field_validators", "=", "None", ",", "non_field_validators", "=", "None", ",", "schema", "=", "None", ",", "context", "=", "None", ")", ":", "if", "schema", "is", "None", ":", "schema", "=", "{", "}", "if", "context", "is", "None", ":", "context", "=", "{", "}", "if", "field_validators", "is", "None", ":", "field_validators", "=", "ValidationDict", "(", ")", "if", "non_field_validators", "is", "None", ":", "non_field_validators", "=", "ValidationList", "(", ")", "from", "flex", ".", "validation", ".", "schema", "import", "(", "construct_schema_validators", ",", ")", "schema_validators", "=", "construct_schema_validators", "(", "schema", ",", "context", ")", "if", "'$ref'", "in", "schema_validators", "and", "hasattr", "(", "schema_validators", "[", "'$ref'", "]", ",", "'validators'", ")", ":", "ref_", "=", "field_validators", ".", "pop", "(", "'$ref'", ")", "for", "k", ",", "v", "in", "ref_", ".", "validators", ".", "items", "(", ")", ":", "if", "k", "not", "in", "schema_validators", ":", "schema_validators", ".", "add_validator", "(", "k", ",", "v", ")", "if", "'discriminator'", "in", "schema", ":", "schema_validators", "=", "add_polymorphism_requirements", "(", "obj", ",", "schema", ",", "context", ",", "schema_validators", ")", "# delete resolved discriminator to avoid infinite recursion", "del", "schema", "[", "'discriminator'", "]", "schema_validators", ".", "update", "(", "field_validators", ")", "schema_validators", ".", "validate_object", "(", "obj", ",", "context", "=", "context", ")", "non_field_validators", ".", "validate_object", "(", "obj", ",", "context", "=", "context", ")", "return", "obj" ]
Takes a mapping and applies a mapping of validator functions to it, collecting and reraising any validation errors that occur.
[ "Takes", "a", "mapping", "and", "applies", "a", "mapping", "of", "validator", "functions", "to", "it", "collecting", "and", "reraising", "any", "validation", "errors", "that", "occur", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L365-L398
train
pipermerriam/flex
flex/validation/common.py
validate_request_method_to_operation
def validate_request_method_to_operation(request_method, path_definition): """ Given a request method, validate that the request method is valid for the api path. If so, return the operation definition related to this request method. """ try: operation_definition = path_definition[request_method] except KeyError: allowed_methods = set(REQUEST_METHODS).intersection(path_definition.keys()) raise ValidationError( MESSAGES['request']['invalid_method'].format( request_method, allowed_methods, ), ) return operation_definition
python
def validate_request_method_to_operation(request_method, path_definition): """ Given a request method, validate that the request method is valid for the api path. If so, return the operation definition related to this request method. """ try: operation_definition = path_definition[request_method] except KeyError: allowed_methods = set(REQUEST_METHODS).intersection(path_definition.keys()) raise ValidationError( MESSAGES['request']['invalid_method'].format( request_method, allowed_methods, ), ) return operation_definition
[ "def", "validate_request_method_to_operation", "(", "request_method", ",", "path_definition", ")", ":", "try", ":", "operation_definition", "=", "path_definition", "[", "request_method", "]", "except", "KeyError", ":", "allowed_methods", "=", "set", "(", "REQUEST_METHODS", ")", ".", "intersection", "(", "path_definition", ".", "keys", "(", ")", ")", "raise", "ValidationError", "(", "MESSAGES", "[", "'request'", "]", "[", "'invalid_method'", "]", ".", "format", "(", "request_method", ",", "allowed_methods", ",", ")", ",", ")", "return", "operation_definition" ]
Given a request method, validate that the request method is valid for the api path. If so, return the operation definition related to this request method.
[ "Given", "a", "request", "method", "validate", "that", "the", "request", "method", "is", "valid", "for", "the", "api", "path", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L498-L514
train
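validate_request_method_to_operation is a plain dictionary lookup whose error message is narrowed to the HTTP verbs actually present in the path item. A sketch with an assumed local `REQUEST_METHODS` constant:

```python
# REQUEST_METHODS is an assumed local constant, not imported from flex.
REQUEST_METHODS = ('get', 'put', 'post', 'delete', 'options', 'head', 'patch')


def lookup_operation(request_method, path_definition):
    """Sketch of resolving a request method to its operation definition."""
    try:
        return path_definition[request_method]
    except KeyError:
        # Only advertise keys that are real HTTP verbs; a path item may also
        # carry 'parameters' or vendor extensions.
        allowed = set(REQUEST_METHODS).intersection(path_definition)
        raise LookupError("Unsupported method {0!r}; allowed: {1}".format(
            request_method, sorted(allowed)))


path_item = {'get': {'responses': {'200': {'description': 'ok'}}}, 'parameters': []}
print(sorted(lookup_operation('get', path_item)['responses']))   # ['200']
try:
    lookup_operation('post', path_item)
except LookupError as err:
    print(err)
```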
pipermerriam/flex
flex/validation/common.py
validate_path_to_api_path
def validate_path_to_api_path(path, paths, basePath='', context=None, **kwargs): """ Given a path, find the api_path it matches. """ if context is None: context = {} try: api_path = match_path_to_api_path( path_definitions=paths, target_path=path, base_path=basePath, context=context, ) except LookupError as err: raise ValidationError(str(err)) except MultiplePathsFound as err: raise ValidationError(str(err)) return api_path
python
def validate_path_to_api_path(path, paths, basePath='', context=None, **kwargs): """ Given a path, find the api_path it matches. """ if context is None: context = {} try: api_path = match_path_to_api_path( path_definitions=paths, target_path=path, base_path=basePath, context=context, ) except LookupError as err: raise ValidationError(str(err)) except MultiplePathsFound as err: raise ValidationError(str(err)) return api_path
[ "def", "validate_path_to_api_path", "(", "path", ",", "paths", ",", "basePath", "=", "''", ",", "context", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "context", "is", "None", ":", "context", "=", "{", "}", "try", ":", "api_path", "=", "match_path_to_api_path", "(", "path_definitions", "=", "paths", ",", "target_path", "=", "path", ",", "base_path", "=", "basePath", ",", "context", "=", "context", ",", ")", "except", "LookupError", "as", "err", ":", "raise", "ValidationError", "(", "str", "(", "err", ")", ")", "except", "MultiplePathsFound", "as", "err", ":", "raise", "ValidationError", "(", "str", "(", "err", ")", ")", "return", "api_path" ]
Given a path, find the api_path it matches.
[ "Given", "a", "path", "find", "the", "api_path", "it", "matches", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/common.py#L517-L535
train
pipermerriam/flex
flex/validation/parameter.py
validate_path_parameters
def validate_path_parameters(target_path, api_path, path_parameters, context): """ Helper function for validating a request path """ base_path = context.get('basePath', '') full_api_path = re.sub(NORMALIZE_SLASH_REGEX, '/', base_path + api_path) parameter_values = get_path_parameter_values( target_path, full_api_path, path_parameters, context, ) validate_parameters(parameter_values, path_parameters, context=context)
python
def validate_path_parameters(target_path, api_path, path_parameters, context): """ Helper function for validating a request path """ base_path = context.get('basePath', '') full_api_path = re.sub(NORMALIZE_SLASH_REGEX, '/', base_path + api_path) parameter_values = get_path_parameter_values( target_path, full_api_path, path_parameters, context, ) validate_parameters(parameter_values, path_parameters, context=context)
[ "def", "validate_path_parameters", "(", "target_path", ",", "api_path", ",", "path_parameters", ",", "context", ")", ":", "base_path", "=", "context", ".", "get", "(", "'basePath'", ",", "''", ")", "full_api_path", "=", "re", ".", "sub", "(", "NORMALIZE_SLASH_REGEX", ",", "'/'", ",", "base_path", "+", "api_path", ")", "parameter_values", "=", "get_path_parameter_values", "(", "target_path", ",", "full_api_path", ",", "path_parameters", ",", "context", ",", ")", "validate_parameters", "(", "parameter_values", ",", "path_parameters", ",", "context", "=", "context", ")" ]
Helper function for validating a request path
[ "Helper", "function", "for", "validating", "a", "request", "path" ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/parameter.py#L75-L84
train
pipermerriam/flex
flex/validation/parameter.py
construct_parameter_validators
def construct_parameter_validators(parameter, context): """ Constructs a dictionary of validator functions for the provided parameter definition. """ validators = ValidationDict() if '$ref' in parameter: validators.add_validator( '$ref', ParameterReferenceValidator(parameter['$ref'], context), ) for key in parameter: if key in validator_mapping: validators.add_validator( key, validator_mapping[key](context=context, **parameter), ) if 'schema' in parameter: schema_validators = construct_schema_validators(parameter['schema'], context=context) for key, value in schema_validators.items(): validators.setdefault(key, value) return validators
python
def construct_parameter_validators(parameter, context): """ Constructs a dictionary of validator functions for the provided parameter definition. """ validators = ValidationDict() if '$ref' in parameter: validators.add_validator( '$ref', ParameterReferenceValidator(parameter['$ref'], context), ) for key in parameter: if key in validator_mapping: validators.add_validator( key, validator_mapping[key](context=context, **parameter), ) if 'schema' in parameter: schema_validators = construct_schema_validators(parameter['schema'], context=context) for key, value in schema_validators.items(): validators.setdefault(key, value) return validators
[ "def", "construct_parameter_validators", "(", "parameter", ",", "context", ")", ":", "validators", "=", "ValidationDict", "(", ")", "if", "'$ref'", "in", "parameter", ":", "validators", ".", "add_validator", "(", "'$ref'", ",", "ParameterReferenceValidator", "(", "parameter", "[", "'$ref'", "]", ",", "context", ")", ",", ")", "for", "key", "in", "parameter", ":", "if", "key", "in", "validator_mapping", ":", "validators", ".", "add_validator", "(", "key", ",", "validator_mapping", "[", "key", "]", "(", "context", "=", "context", ",", "*", "*", "parameter", ")", ",", ")", "if", "'schema'", "in", "parameter", ":", "schema_validators", "=", "construct_schema_validators", "(", "parameter", "[", "'schema'", "]", ",", "context", "=", "context", ")", "for", "key", ",", "value", "in", "schema_validators", ".", "items", "(", ")", ":", "validators", ".", "setdefault", "(", "key", ",", "value", ")", "return", "validators" ]
Constructs a dictionary of validator functions for the provided parameter definition.
[ "Constructs", "a", "dictionary", "of", "validator", "functions", "for", "the", "provided", "parameter", "definition", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/parameter.py#L109-L129
train
pipermerriam/flex
flex/validation/parameter.py
construct_multi_parameter_validators
def construct_multi_parameter_validators(parameters, context): """ Given an iterable of parameters, returns a dictionary of validator functions for each parameter. Note that this expects the parameters to be unique in their name value, and throws an error if this is not the case. """ validators = ValidationDict() for parameter in parameters: key = parameter['name'] if key in validators: raise ValueError("Duplicate parameter name {0}".format(key)) parameter_validators = construct_parameter_validators(parameter, context=context) validators.add_validator( key, generate_object_validator(field_validators=parameter_validators), ) return validators
python
def construct_multi_parameter_validators(parameters, context): """ Given an iterable of parameters, returns a dictionary of validator functions for each parameter. Note that this expects the parameters to be unique in their name value, and throws an error if this is not the case. """ validators = ValidationDict() for parameter in parameters: key = parameter['name'] if key in validators: raise ValueError("Duplicate parameter name {0}".format(key)) parameter_validators = construct_parameter_validators(parameter, context=context) validators.add_validator( key, generate_object_validator(field_validators=parameter_validators), ) return validators
[ "def", "construct_multi_parameter_validators", "(", "parameters", ",", "context", ")", ":", "validators", "=", "ValidationDict", "(", ")", "for", "parameter", "in", "parameters", ":", "key", "=", "parameter", "[", "'name'", "]", "if", "key", "in", "validators", ":", "raise", "ValueError", "(", "\"Duplicate parameter name {0}\"", ".", "format", "(", "key", ")", ")", "parameter_validators", "=", "construct_parameter_validators", "(", "parameter", ",", "context", "=", "context", ")", "validators", ".", "add_validator", "(", "key", ",", "generate_object_validator", "(", "field_validators", "=", "parameter_validators", ")", ",", ")", "return", "validators" ]
Given an iterable of parameters, returns a dictionary of validator functions for each parameter. Note that this expects the parameters to be unique in their name value, and throws an error if this is not the case.
[ "Given", "an", "iterable", "of", "parameters", "returns", "a", "dictionary", "of", "validator", "functions", "for", "each", "parameter", ".", "Note", "that", "this", "expects", "the", "parameters", "to", "be", "unique", "in", "their", "name", "value", "and", "throws", "an", "error", "if", "this", "is", "not", "the", "case", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/parameter.py#L150-L167
train
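The notable guard in construct_multi_parameter_validators is the duplicate-name check: parameters are keyed by `name`, so two parameters sharing a name is rejected before any validator is built. A reduced sketch of that indexing step:

```python
def index_parameters_by_name(parameters):
    """Sketch of keying a parameter list by 'name' while rejecting
    duplicates, as the multi-parameter validator construction does."""
    indexed = {}
    for parameter in parameters:
        key = parameter['name']
        if key in indexed:
            raise ValueError("Duplicate parameter name {0}".format(key))
        indexed[key] = parameter
    return indexed


params = [
    {'name': 'id', 'in': 'path', 'type': 'integer'},
    {'name': 'verbose', 'in': 'query', 'type': 'boolean'},
]
print(sorted(index_parameters_by_name(params)))   # ['id', 'verbose']
```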
pipermerriam/flex
flex/validation/path.py
generate_path_parameters_validator
def generate_path_parameters_validator(api_path, path_parameters, context): """ Generates a validator function that given a path, validates that it against the path parameters """ path_parameter_validator = functools.partial( validate_path_parameters, api_path=api_path, path_parameters=path_parameters, context=context, ) return path_parameter_validator
python
def generate_path_parameters_validator(api_path, path_parameters, context): """ Generates a validator function that given a path, validates that it against the path parameters """ path_parameter_validator = functools.partial( validate_path_parameters, api_path=api_path, path_parameters=path_parameters, context=context, ) return path_parameter_validator
[ "def", "generate_path_parameters_validator", "(", "api_path", ",", "path_parameters", ",", "context", ")", ":", "path_parameter_validator", "=", "functools", ".", "partial", "(", "validate_path_parameters", ",", "api_path", "=", "api_path", ",", "path_parameters", "=", "path_parameters", ",", "context", "=", "context", ",", ")", "return", "path_parameter_validator" ]
Generates a validator function that, given a path, validates it against the path parameters
[ "Generates", "a", "validator", "function", "that", "given", "a", "path", "validates", "it", "against", "the", "path", "parameters" ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/path.py#L8-L19
train
pipermerriam/flex
flex/paths.py
escape_regex_special_chars
def escape_regex_special_chars(api_path): """ Turns the non prametrized path components into strings subtable for using as a regex pattern. This primarily involves escaping special characters so that the actual character is matched in the regex. """ def substitute(string, replacements): pattern, repl = replacements return re.sub(pattern, repl, string) return functools.reduce(substitute, REGEX_REPLACEMENTS, api_path)
python
def escape_regex_special_chars(api_path): """ Turns the non prametrized path components into strings subtable for using as a regex pattern. This primarily involves escaping special characters so that the actual character is matched in the regex. """ def substitute(string, replacements): pattern, repl = replacements return re.sub(pattern, repl, string) return functools.reduce(substitute, REGEX_REPLACEMENTS, api_path)
[ "def", "escape_regex_special_chars", "(", "api_path", ")", ":", "def", "substitute", "(", "string", ",", "replacements", ")", ":", "pattern", ",", "repl", "=", "replacements", "return", "re", ".", "sub", "(", "pattern", ",", "repl", ",", "string", ")", "return", "functools", ".", "reduce", "(", "substitute", ",", "REGEX_REPLACEMENTS", ",", "api_path", ")" ]
Turns the non-parametrized path components into strings suitable for use as a regex pattern. This primarily involves escaping special characters so that the actual character is matched in the regex.
[ "Turns", "the", "non", "parametrized", "path", "components", "into", "strings", "suitable", "for", "use", "as", "a", "regex", "pattern", ".", "This", "primarily", "involves", "escaping", "special", "characters", "so", "that", "the", "actual", "character", "is", "matched", "in", "the", "regex", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/paths.py#L26-L36
train
pipermerriam/flex
flex/paths.py
construct_parameter_pattern
def construct_parameter_pattern(parameter): """ Given a parameter definition returns a regex pattern that will match that part of the path. """ name = parameter['name'] type = parameter['type'] repeated = '[^/]' if type == 'integer': repeated = '\d' return "(?P<{name}>{repeated}+)".format(name=name, repeated=repeated)
python
def construct_parameter_pattern(parameter): """ Given a parameter definition returns a regex pattern that will match that part of the path. """ name = parameter['name'] type = parameter['type'] repeated = '[^/]' if type == 'integer': repeated = '\d' return "(?P<{name}>{repeated}+)".format(name=name, repeated=repeated)
[ "def", "construct_parameter_pattern", "(", "parameter", ")", ":", "name", "=", "parameter", "[", "'name'", "]", "type", "=", "parameter", "[", "'type'", "]", "repeated", "=", "'[^/]'", "if", "type", "==", "'integer'", ":", "repeated", "=", "'\\d'", "return", "\"(?P<{name}>{repeated}+)\"", ".", "format", "(", "name", "=", "name", ",", "repeated", "=", "repeated", ")" ]
Given a parameter definition returns a regex pattern that will match that part of the path.
[ "Given", "a", "parameter", "definition", "returns", "a", "regex", "pattern", "that", "will", "match", "that", "part", "of", "the", "path", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/paths.py#L44-L57
train
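construct_parameter_pattern maps each templated path segment to a named capture group whose character class depends on the declared type (`\d` for integers, `[^/]` otherwise). A small standalone demonstration:

```python
import re


def parameter_pattern(parameter):
    """Sketch: turn one parameter definition into a named regex group."""
    repeated = r'\d' if parameter.get('type') == 'integer' else r'[^/]'
    return "(?P<{name}>{repeated}+)".format(name=parameter['name'], repeated=repeated)


id_pattern = parameter_pattern({'name': 'id', 'type': 'integer'})
slug_pattern = parameter_pattern({'name': 'slug', 'type': 'string'})
print(id_pattern)                                           # (?P<id>\d+)
print(re.match(id_pattern, '42').group('id'))               # 42
print(re.match(slug_pattern, 'hello-world').group('slug'))  # hello-world
```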
pipermerriam/flex
flex/paths.py
path_to_pattern
def path_to_pattern(api_path, parameters): """ Given an api path, possibly with parameter notation, return a pattern suitable for turing into a regular expression which will match request paths that conform to the parameter definitions and the api path. """ parts = re.split(PARAMETER_REGEX, api_path) pattern = ''.join((process_path_part(part, parameters) for part in parts)) if not pattern.startswith('^'): pattern = "^{0}".format(pattern) if not pattern.endswith('$'): pattern = "{0}$".format(pattern) return pattern
python
def path_to_pattern(api_path, parameters): """ Given an api path, possibly with parameter notation, return a pattern suitable for turing into a regular expression which will match request paths that conform to the parameter definitions and the api path. """ parts = re.split(PARAMETER_REGEX, api_path) pattern = ''.join((process_path_part(part, parameters) for part in parts)) if not pattern.startswith('^'): pattern = "^{0}".format(pattern) if not pattern.endswith('$'): pattern = "{0}$".format(pattern) return pattern
[ "def", "path_to_pattern", "(", "api_path", ",", "parameters", ")", ":", "parts", "=", "re", ".", "split", "(", "PARAMETER_REGEX", ",", "api_path", ")", "pattern", "=", "''", ".", "join", "(", "(", "process_path_part", "(", "part", ",", "parameters", ")", "for", "part", "in", "parts", ")", ")", "if", "not", "pattern", ".", "startswith", "(", "'^'", ")", ":", "pattern", "=", "\"^{0}\"", ".", "format", "(", "pattern", ")", "if", "not", "pattern", ".", "endswith", "(", "'$'", ")", ":", "pattern", "=", "\"{0}$\"", ".", "format", "(", "pattern", ")", "return", "pattern" ]
Given an api path, possibly with parameter notation, return a pattern suitable for turning into a regular expression which will match request paths that conform to the parameter definitions and the api path.
[ "Given", "an", "api", "path", "possibly", "with", "parameter", "notation", "return", "a", "pattern", "suitable", "for", "turning", "into", "a", "regular", "expression", "which", "will", "match", "request", "paths", "that", "conform", "to", "the", "parameter", "definitions", "and", "the", "api", "path", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/paths.py#L87-L101
train
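Putting path_to_pattern together: split the api path on its `{name}` templates, substitute a named group per parameter, and anchor the result. The regex and helper below are simplified stand-ins for flex's `PARAMETER_REGEX` and `process_path_part`, so this is a sketch of the approach, not the exact implementation:

```python
import re

# Simplified stand-in for flex's PARAMETER_REGEX / process_path_part pair.
TEMPLATE_REGEX = re.compile(r'\{([^}]+)\}')


def path_to_pattern(api_path, parameters):
    """Sketch: compile '/users/{id}/posts/{slug}' into an anchored regex."""
    types = {p['name']: p.get('type', 'string') for p in parameters}

    def replace(match):
        name = match.group(1)
        repeated = r'\d' if types.get(name) == 'integer' else r'[^/]'
        return "(?P<{0}>{1}+)".format(name, repeated)

    return "^{0}$".format(TEMPLATE_REGEX.sub(replace, api_path))


pattern = path_to_pattern(
    '/users/{id}/posts/{slug}',
    [{'name': 'id', 'type': 'integer'}, {'name': 'slug', 'type': 'string'}],
)
print(pattern)   # ^/users/(?P<id>\d+)/posts/(?P<slug>[^/]+)$
print(re.match(pattern, '/users/42/posts/hello-world').groupdict())
```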
pipermerriam/flex
flex/paths.py
match_path_to_api_path
def match_path_to_api_path(path_definitions, target_path, base_path='', context=None): """ Match a request or response path to one of the api paths. Anything other than exactly one match is an error condition. """ if context is None: context = {} assert isinstance(context, collections.Mapping) if target_path.startswith(base_path): # Convert all of the api paths into Path instances for easier regex # matching. normalized_target_path = re.sub(NORMALIZE_SLASH_REGEX, '/', target_path) matching_api_paths = list() matching_api_paths_regex = list() for p, v in path_definitions.items(): # Doing this to help with case where we might have base_path # being just /, and then the path starts with / as well. full_path = re.sub(NORMALIZE_SLASH_REGEX, '/', base_path + p) r = path_to_regex( api_path=full_path, path_parameters=extract_path_parameters(v), operation_parameters=extract_operation_parameters(v), context=context, ) if full_path == normalized_target_path: matching_api_paths.append(p) elif r.match(normalized_target_path): matching_api_paths_regex.\ append((p, r.match(normalized_target_path))) # Keep it consistent with the previous behavior target_path = target_path[len(base_path):] else: matching_api_paths = [] matching_api_paths_regex = [] if not matching_api_paths and not matching_api_paths_regex: fstr = MESSAGES['path']['no_matching_paths_found'].format(target_path) raise LookupError(fstr) elif len(matching_api_paths) == 1: return matching_api_paths[0] elif len(matching_api_paths) > 1: raise MultiplePathsFound( MESSAGES['path']['multiple_paths_found'].format( target_path, [v[0] for v in matching_api_paths], ) ) elif len(matching_api_paths_regex) == 1: return matching_api_paths_regex[0][0] elif len(matching_api_paths_regex) > 1: # TODO: This area needs improved logic. # We check to see if any of the matched paths is longers than # the others. If so, we *assume* it is the correct match. This is # going to be prone to false positives. in certain cases. matches_by_path_size = collections.defaultdict(list) for path, match in matching_api_paths_regex: matches_by_path_size[len(path)].append(path) longest_match = max(matches_by_path_size.keys()) if len(matches_by_path_size[longest_match]) == 1: return matches_by_path_size[longest_match][0] raise MultiplePathsFound( MESSAGES['path']['multiple_paths_found'].format( target_path, [v[0] for v in matching_api_paths_regex], ) ) else: return matching_api_paths_regex[0][0]
python
def match_path_to_api_path(path_definitions, target_path, base_path='', context=None): """ Match a request or response path to one of the api paths. Anything other than exactly one match is an error condition. """ if context is None: context = {} assert isinstance(context, collections.Mapping) if target_path.startswith(base_path): # Convert all of the api paths into Path instances for easier regex # matching. normalized_target_path = re.sub(NORMALIZE_SLASH_REGEX, '/', target_path) matching_api_paths = list() matching_api_paths_regex = list() for p, v in path_definitions.items(): # Doing this to help with case where we might have base_path # being just /, and then the path starts with / as well. full_path = re.sub(NORMALIZE_SLASH_REGEX, '/', base_path + p) r = path_to_regex( api_path=full_path, path_parameters=extract_path_parameters(v), operation_parameters=extract_operation_parameters(v), context=context, ) if full_path == normalized_target_path: matching_api_paths.append(p) elif r.match(normalized_target_path): matching_api_paths_regex.\ append((p, r.match(normalized_target_path))) # Keep it consistent with the previous behavior target_path = target_path[len(base_path):] else: matching_api_paths = [] matching_api_paths_regex = [] if not matching_api_paths and not matching_api_paths_regex: fstr = MESSAGES['path']['no_matching_paths_found'].format(target_path) raise LookupError(fstr) elif len(matching_api_paths) == 1: return matching_api_paths[0] elif len(matching_api_paths) > 1: raise MultiplePathsFound( MESSAGES['path']['multiple_paths_found'].format( target_path, [v[0] for v in matching_api_paths], ) ) elif len(matching_api_paths_regex) == 1: return matching_api_paths_regex[0][0] elif len(matching_api_paths_regex) > 1: # TODO: This area needs improved logic. # We check to see if any of the matched paths is longers than # the others. If so, we *assume* it is the correct match. This is # going to be prone to false positives. in certain cases. matches_by_path_size = collections.defaultdict(list) for path, match in matching_api_paths_regex: matches_by_path_size[len(path)].append(path) longest_match = max(matches_by_path_size.keys()) if len(matches_by_path_size[longest_match]) == 1: return matches_by_path_size[longest_match][0] raise MultiplePathsFound( MESSAGES['path']['multiple_paths_found'].format( target_path, [v[0] for v in matching_api_paths_regex], ) ) else: return matching_api_paths_regex[0][0]
[ "def", "match_path_to_api_path", "(", "path_definitions", ",", "target_path", ",", "base_path", "=", "''", ",", "context", "=", "None", ")", ":", "if", "context", "is", "None", ":", "context", "=", "{", "}", "assert", "isinstance", "(", "context", ",", "collections", ".", "Mapping", ")", "if", "target_path", ".", "startswith", "(", "base_path", ")", ":", "# Convert all of the api paths into Path instances for easier regex", "# matching.", "normalized_target_path", "=", "re", ".", "sub", "(", "NORMALIZE_SLASH_REGEX", ",", "'/'", ",", "target_path", ")", "matching_api_paths", "=", "list", "(", ")", "matching_api_paths_regex", "=", "list", "(", ")", "for", "p", ",", "v", "in", "path_definitions", ".", "items", "(", ")", ":", "# Doing this to help with case where we might have base_path", "# being just /, and then the path starts with / as well.", "full_path", "=", "re", ".", "sub", "(", "NORMALIZE_SLASH_REGEX", ",", "'/'", ",", "base_path", "+", "p", ")", "r", "=", "path_to_regex", "(", "api_path", "=", "full_path", ",", "path_parameters", "=", "extract_path_parameters", "(", "v", ")", ",", "operation_parameters", "=", "extract_operation_parameters", "(", "v", ")", ",", "context", "=", "context", ",", ")", "if", "full_path", "==", "normalized_target_path", ":", "matching_api_paths", ".", "append", "(", "p", ")", "elif", "r", ".", "match", "(", "normalized_target_path", ")", ":", "matching_api_paths_regex", ".", "append", "(", "(", "p", ",", "r", ".", "match", "(", "normalized_target_path", ")", ")", ")", "# Keep it consistent with the previous behavior", "target_path", "=", "target_path", "[", "len", "(", "base_path", ")", ":", "]", "else", ":", "matching_api_paths", "=", "[", "]", "matching_api_paths_regex", "=", "[", "]", "if", "not", "matching_api_paths", "and", "not", "matching_api_paths_regex", ":", "fstr", "=", "MESSAGES", "[", "'path'", "]", "[", "'no_matching_paths_found'", "]", ".", "format", "(", "target_path", ")", "raise", "LookupError", "(", "fstr", ")", "elif", "len", "(", "matching_api_paths", ")", "==", "1", ":", "return", "matching_api_paths", "[", "0", "]", "elif", "len", "(", "matching_api_paths", ")", ">", "1", ":", "raise", "MultiplePathsFound", "(", "MESSAGES", "[", "'path'", "]", "[", "'multiple_paths_found'", "]", ".", "format", "(", "target_path", ",", "[", "v", "[", "0", "]", "for", "v", "in", "matching_api_paths", "]", ",", ")", ")", "elif", "len", "(", "matching_api_paths_regex", ")", "==", "1", ":", "return", "matching_api_paths_regex", "[", "0", "]", "[", "0", "]", "elif", "len", "(", "matching_api_paths_regex", ")", ">", "1", ":", "# TODO: This area needs improved logic.", "# We check to see if any of the matched paths is longers than", "# the others. If so, we *assume* it is the correct match. This is", "# going to be prone to false positives. 
in certain cases.", "matches_by_path_size", "=", "collections", ".", "defaultdict", "(", "list", ")", "for", "path", ",", "match", "in", "matching_api_paths_regex", ":", "matches_by_path_size", "[", "len", "(", "path", ")", "]", ".", "append", "(", "path", ")", "longest_match", "=", "max", "(", "matches_by_path_size", ".", "keys", "(", ")", ")", "if", "len", "(", "matches_by_path_size", "[", "longest_match", "]", ")", "==", "1", ":", "return", "matches_by_path_size", "[", "longest_match", "]", "[", "0", "]", "raise", "MultiplePathsFound", "(", "MESSAGES", "[", "'path'", "]", "[", "'multiple_paths_found'", "]", ".", "format", "(", "target_path", ",", "[", "v", "[", "0", "]", "for", "v", "in", "matching_api_paths_regex", "]", ",", ")", ")", "else", ":", "return", "matching_api_paths_regex", "[", "0", "]", "[", "0", "]" ]
Match a request or response path to one of the api paths. Anything other than exactly one match is an error condition.
[ "Match", "a", "request", "or", "response", "path", "to", "one", "of", "the", "api", "paths", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/paths.py#L140-L209
train
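The part of match_path_to_api_path worth isolating is the tie-break used when several templated paths match the same request path: keep the single longest template if there is one, otherwise report the ambiguity. A compact sketch with hypothetical paths:

```python
import collections


def pick_longest_match(matching_paths):
    """Sketch of the tie-break applied when several templated paths match a
    request path: keep the longest template if it is unique, otherwise the
    lookup stays ambiguous."""
    by_length = collections.defaultdict(list)
    for path in matching_paths:
        by_length[len(path)].append(path)
    longest = max(by_length)
    if len(by_length[longest]) == 1:
        return by_length[longest][0]
    raise LookupError("Ambiguous paths: {0}".format(sorted(by_length[longest])))


# '/users/{id}/posts' is longer than '/users/{id}', so it wins if both
# of these hypothetical templates matched the same request path.
print(pick_longest_match(['/users/{id}', '/users/{id}/posts']))
```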
pipermerriam/flex
flex/validation/request.py
validate_request
def validate_request(request, schema): """ Request validation does the following steps. 1. validate that the path matches one of the defined paths in the schema. 2. validate that the request method conforms to a supported methods for the given path. 3. validate that the request parameters conform to the parameter definitions for the operation definition. """ with ErrorDict() as errors: # 1 try: api_path = validate_path_to_api_path( path=request.path, context=schema, **schema ) except ValidationError as err: errors['path'].add_error(err.detail) return # this causes an exception to be raised since errors is no longer falsy. path_definition = schema['paths'][api_path] or {} if not path_definition: # TODO: is it valid to not have a definition for a path? return # 2 try: operation_definition = validate_request_method_to_operation( request_method=request.method, path_definition=path_definition, ) except ValidationError as err: errors['method'].add_error(err.detail) return if operation_definition is None: # TODO: is this compliant with swagger, can path operations have a null # definition? return # 3 operation_validators = construct_operation_validators( api_path=api_path, path_definition=path_definition, operation_definition=operation_definition, context=schema, ) try: validate_operation(request, operation_validators, context=schema) except ValidationError as err: errors['method'].add_error(err.detail)
python
def validate_request(request, schema): """ Request validation does the following steps. 1. validate that the path matches one of the defined paths in the schema. 2. validate that the request method conforms to a supported methods for the given path. 3. validate that the request parameters conform to the parameter definitions for the operation definition. """ with ErrorDict() as errors: # 1 try: api_path = validate_path_to_api_path( path=request.path, context=schema, **schema ) except ValidationError as err: errors['path'].add_error(err.detail) return # this causes an exception to be raised since errors is no longer falsy. path_definition = schema['paths'][api_path] or {} if not path_definition: # TODO: is it valid to not have a definition for a path? return # 2 try: operation_definition = validate_request_method_to_operation( request_method=request.method, path_definition=path_definition, ) except ValidationError as err: errors['method'].add_error(err.detail) return if operation_definition is None: # TODO: is this compliant with swagger, can path operations have a null # definition? return # 3 operation_validators = construct_operation_validators( api_path=api_path, path_definition=path_definition, operation_definition=operation_definition, context=schema, ) try: validate_operation(request, operation_validators, context=schema) except ValidationError as err: errors['method'].add_error(err.detail)
[ "def", "validate_request", "(", "request", ",", "schema", ")", ":", "with", "ErrorDict", "(", ")", "as", "errors", ":", "# 1", "try", ":", "api_path", "=", "validate_path_to_api_path", "(", "path", "=", "request", ".", "path", ",", "context", "=", "schema", ",", "*", "*", "schema", ")", "except", "ValidationError", "as", "err", ":", "errors", "[", "'path'", "]", ".", "add_error", "(", "err", ".", "detail", ")", "return", "# this causes an exception to be raised since errors is no longer falsy.", "path_definition", "=", "schema", "[", "'paths'", "]", "[", "api_path", "]", "or", "{", "}", "if", "not", "path_definition", ":", "# TODO: is it valid to not have a definition for a path?", "return", "# 2", "try", ":", "operation_definition", "=", "validate_request_method_to_operation", "(", "request_method", "=", "request", ".", "method", ",", "path_definition", "=", "path_definition", ",", ")", "except", "ValidationError", "as", "err", ":", "errors", "[", "'method'", "]", ".", "add_error", "(", "err", ".", "detail", ")", "return", "if", "operation_definition", "is", "None", ":", "# TODO: is this compliant with swagger, can path operations have a null", "# definition?", "return", "# 3", "operation_validators", "=", "construct_operation_validators", "(", "api_path", "=", "api_path", ",", "path_definition", "=", "path_definition", ",", "operation_definition", "=", "operation_definition", ",", "context", "=", "schema", ",", ")", "try", ":", "validate_operation", "(", "request", ",", "operation_validators", ",", "context", "=", "schema", ")", "except", "ValidationError", "as", "err", ":", "errors", "[", "'method'", "]", ".", "add_error", "(", "err", ".", "detail", ")" ]
Request validation does the following steps. 1. validate that the path matches one of the defined paths in the schema. 2. validate that the request method conforms to one of the supported methods for the given path. 3. validate that the request parameters conform to the parameter definitions for the operation definition.
[ "Request", "validation", "does", "the", "following", "steps", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/request.py#L13-L65
train
pipermerriam/flex
flex/http.py
normalize_request
def normalize_request(request): """ Given a request, normalize it to the internal Request class. """ if isinstance(request, Request): return request for normalizer in REQUEST_NORMALIZERS: try: return normalizer(request) except TypeError: continue raise ValueError("Unable to normalize the provided request")
python
def normalize_request(request): """ Given a request, normalize it to the internal Request class. """ if isinstance(request, Request): return request for normalizer in REQUEST_NORMALIZERS: try: return normalizer(request) except TypeError: continue raise ValueError("Unable to normalize the provided request")
[ "def", "normalize_request", "(", "request", ")", ":", "if", "isinstance", "(", "request", ",", "Request", ")", ":", "return", "request", "for", "normalizer", "in", "REQUEST_NORMALIZERS", ":", "try", ":", "return", "normalizer", "(", "request", ")", "except", "TypeError", ":", "continue", "raise", "ValueError", "(", "\"Unable to normalize the provided request\"", ")" ]
Given a request, normalize it to the internal Request class.
[ "Given", "a", "request", "normalize", "it", "to", "the", "internal", "Request", "class", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/http.py#L279-L292
train
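normalize_request is an adapter chain: each normalizer either returns an internal `Request` or raises `TypeError` to pass the object along to the next one. The sketch below mirrors that control flow with two made-up adapters; the real normalizers wrap framework-specific request objects.

```python
class Request(object):
    """Minimal stand-in for the internal Request class."""
    def __init__(self, url, method):
        self.url = url
        self.method = method


def normalize_dict_request(raw):
    # Hypothetical adapter: only accepts plain dicts.
    if not isinstance(raw, dict):
        raise TypeError("not a dict-style request")
    return Request(url=raw['url'], method=raw['method'])


def normalize_tuple_request(raw):
    # Hypothetical adapter: only accepts (url, method) tuples.
    if not isinstance(raw, tuple):
        raise TypeError("not a tuple-style request")
    return Request(url=raw[0], method=raw[1])


REQUEST_NORMALIZERS = [normalize_dict_request, normalize_tuple_request]


def normalize_request(raw):
    """Sketch of the adapter chain: the first normalizer that does not
    raise TypeError wins."""
    if isinstance(raw, Request):
        return raw
    for normalizer in REQUEST_NORMALIZERS:
        try:
            return normalizer(raw)
        except TypeError:
            continue
    raise ValueError("Unable to normalize the provided request")


print(normalize_request({'url': 'http://example.com/users', 'method': 'get'}).method)   # get
print(normalize_request(('http://example.com/users', 'post')).method)                   # post
```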
pipermerriam/flex
flex/http.py
normalize_response
def normalize_response(response, request=None): """ Given a response, normalize it to the internal Response class. This also involves normalizing the associated request object. """ if isinstance(response, Response): return response if request is not None and not isinstance(request, Request): request = normalize_request(request) for normalizer in RESPONSE_NORMALIZERS: try: return normalizer(response, request=request) except TypeError: continue raise ValueError("Unable to normalize the provided response")
python
def normalize_response(response, request=None): """ Given a response, normalize it to the internal Response class. This also involves normalizing the associated request object. """ if isinstance(response, Response): return response if request is not None and not isinstance(request, Request): request = normalize_request(request) for normalizer in RESPONSE_NORMALIZERS: try: return normalizer(response, request=request) except TypeError: continue raise ValueError("Unable to normalize the provided response")
[ "def", "normalize_response", "(", "response", ",", "request", "=", "None", ")", ":", "if", "isinstance", "(", "response", ",", "Response", ")", ":", "return", "response", "if", "request", "is", "not", "None", "and", "not", "isinstance", "(", "request", ",", "Request", ")", ":", "request", "=", "normalize_request", "(", "request", ")", "for", "normalizer", "in", "RESPONSE_NORMALIZERS", ":", "try", ":", "return", "normalizer", "(", "response", ",", "request", "=", "request", ")", "except", "TypeError", ":", "continue", "raise", "ValueError", "(", "\"Unable to normalize the provided response\"", ")" ]
Given a response, normalize it to the internal Response class. This also involves normalizing the associated request object.
[ "Given", "a", "response", "normalize", "it", "to", "the", "internal", "Response", "class", ".", "This", "also", "involves", "normalizing", "the", "associated", "request", "object", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/http.py#L474-L490
train
pipermerriam/flex
flex/validation/operation.py
generate_header_validator
def generate_header_validator(headers, context, **kwargs): """ Generates a validation function that will validate a dictionary of headers. """ validators = ValidationDict() for header_definition in headers: header_processor = generate_value_processor( context=context, **header_definition ) header_validator = generate_object_validator( field_validators=construct_header_validators(header_definition, context=context), ) validators.add_property_validator( header_definition['name'], chain_reduce_partial( header_processor, header_validator, ), ) return generate_object_validator(field_validators=validators)
python
def generate_header_validator(headers, context, **kwargs): """ Generates a validation function that will validate a dictionary of headers. """ validators = ValidationDict() for header_definition in headers: header_processor = generate_value_processor( context=context, **header_definition ) header_validator = generate_object_validator( field_validators=construct_header_validators(header_definition, context=context), ) validators.add_property_validator( header_definition['name'], chain_reduce_partial( header_processor, header_validator, ), ) return generate_object_validator(field_validators=validators)
[ "def", "generate_header_validator", "(", "headers", ",", "context", ",", "*", "*", "kwargs", ")", ":", "validators", "=", "ValidationDict", "(", ")", "for", "header_definition", "in", "headers", ":", "header_processor", "=", "generate_value_processor", "(", "context", "=", "context", ",", "*", "*", "header_definition", ")", "header_validator", "=", "generate_object_validator", "(", "field_validators", "=", "construct_header_validators", "(", "header_definition", ",", "context", "=", "context", ")", ",", ")", "validators", ".", "add_property_validator", "(", "header_definition", "[", "'name'", "]", ",", "chain_reduce_partial", "(", "header_processor", ",", "header_validator", ",", ")", ",", ")", "return", "generate_object_validator", "(", "field_validators", "=", "validators", ")" ]
Generates a validation function that will validate a dictionary of headers.
[ "Generates", "a", "validation", "function", "that", "will", "validate", "a", "dictionary", "of", "headers", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/operation.py#L62-L82
train
pipermerriam/flex
flex/validation/operation.py
generate_parameters_validator
def generate_parameters_validator(api_path, path_definition, parameters, context, **kwargs): """ Generates a validator function to validate. - request.path against the path parameters. - request.query against the query parameters. - request.headers against the header parameters. - TODO: request.body against the body parameters. - TODO: request.formData against any form data. """ # TODO: figure out how to merge this with the same code in response # validation. validators = ValidationDict() path_level_parameters = dereference_parameter_list( path_definition.get('parameters', []), context, ) operation_level_parameters = dereference_parameter_list( parameters, context, ) all_parameters = merge_parameter_lists( path_level_parameters, operation_level_parameters, ) # PATH in_path_parameters = filter_parameters(all_parameters, in_=PATH) validators.add_validator( 'path', chain_reduce_partial( attrgetter('path'), generate_path_parameters_validator(api_path, in_path_parameters, context), ), ) # QUERY in_query_parameters = filter_parameters(all_parameters, in_=QUERY) validators.add_validator( 'query', chain_reduce_partial( attrgetter('query_data'), functools.partial( validate_query_parameters, query_parameters=in_query_parameters, context=context, ), ), ) # HEADERS in_header_parameters = filter_parameters(all_parameters, in_=HEADER) validators.add_validator( 'headers', chain_reduce_partial( attrgetter('headers'), generate_header_validator(in_header_parameters, context), ), ) # FORM_DATA # in_form_data_parameters = filter_parameters(all_parameters, in_=FORM_DATA) # validators.add_validator( # 'form_data', # chain_reduce_partial( # attrgetter('data'), # generate_form_data_validator(in_form_data_parameters, context), # ) # ) # REQUEST_BODY in_request_body_parameters = filter_parameters(all_parameters, in_=BODY) validators.add_validator( 'request_body', chain_reduce_partial( attrgetter('data'), generate_request_body_validator(in_request_body_parameters, context), ) ) return generate_object_validator(field_validators=validators)
python
def generate_parameters_validator(api_path, path_definition, parameters, context, **kwargs): """ Generates a validator function to validate. - request.path against the path parameters. - request.query against the query parameters. - request.headers against the header parameters. - TODO: request.body against the body parameters. - TODO: request.formData against any form data. """ # TODO: figure out how to merge this with the same code in response # validation. validators = ValidationDict() path_level_parameters = dereference_parameter_list( path_definition.get('parameters', []), context, ) operation_level_parameters = dereference_parameter_list( parameters, context, ) all_parameters = merge_parameter_lists( path_level_parameters, operation_level_parameters, ) # PATH in_path_parameters = filter_parameters(all_parameters, in_=PATH) validators.add_validator( 'path', chain_reduce_partial( attrgetter('path'), generate_path_parameters_validator(api_path, in_path_parameters, context), ), ) # QUERY in_query_parameters = filter_parameters(all_parameters, in_=QUERY) validators.add_validator( 'query', chain_reduce_partial( attrgetter('query_data'), functools.partial( validate_query_parameters, query_parameters=in_query_parameters, context=context, ), ), ) # HEADERS in_header_parameters = filter_parameters(all_parameters, in_=HEADER) validators.add_validator( 'headers', chain_reduce_partial( attrgetter('headers'), generate_header_validator(in_header_parameters, context), ), ) # FORM_DATA # in_form_data_parameters = filter_parameters(all_parameters, in_=FORM_DATA) # validators.add_validator( # 'form_data', # chain_reduce_partial( # attrgetter('data'), # generate_form_data_validator(in_form_data_parameters, context), # ) # ) # REQUEST_BODY in_request_body_parameters = filter_parameters(all_parameters, in_=BODY) validators.add_validator( 'request_body', chain_reduce_partial( attrgetter('data'), generate_request_body_validator(in_request_body_parameters, context), ) ) return generate_object_validator(field_validators=validators)
[ "def", "generate_parameters_validator", "(", "api_path", ",", "path_definition", ",", "parameters", ",", "context", ",", "*", "*", "kwargs", ")", ":", "# TODO: figure out how to merge this with the same code in response", "# validation.", "validators", "=", "ValidationDict", "(", ")", "path_level_parameters", "=", "dereference_parameter_list", "(", "path_definition", ".", "get", "(", "'parameters'", ",", "[", "]", ")", ",", "context", ",", ")", "operation_level_parameters", "=", "dereference_parameter_list", "(", "parameters", ",", "context", ",", ")", "all_parameters", "=", "merge_parameter_lists", "(", "path_level_parameters", ",", "operation_level_parameters", ",", ")", "# PATH", "in_path_parameters", "=", "filter_parameters", "(", "all_parameters", ",", "in_", "=", "PATH", ")", "validators", ".", "add_validator", "(", "'path'", ",", "chain_reduce_partial", "(", "attrgetter", "(", "'path'", ")", ",", "generate_path_parameters_validator", "(", "api_path", ",", "in_path_parameters", ",", "context", ")", ",", ")", ",", ")", "# QUERY", "in_query_parameters", "=", "filter_parameters", "(", "all_parameters", ",", "in_", "=", "QUERY", ")", "validators", ".", "add_validator", "(", "'query'", ",", "chain_reduce_partial", "(", "attrgetter", "(", "'query_data'", ")", ",", "functools", ".", "partial", "(", "validate_query_parameters", ",", "query_parameters", "=", "in_query_parameters", ",", "context", "=", "context", ",", ")", ",", ")", ",", ")", "# HEADERS", "in_header_parameters", "=", "filter_parameters", "(", "all_parameters", ",", "in_", "=", "HEADER", ")", "validators", ".", "add_validator", "(", "'headers'", ",", "chain_reduce_partial", "(", "attrgetter", "(", "'headers'", ")", ",", "generate_header_validator", "(", "in_header_parameters", ",", "context", ")", ",", ")", ",", ")", "# FORM_DATA", "# in_form_data_parameters = filter_parameters(all_parameters, in_=FORM_DATA)", "# validators.add_validator(", "# 'form_data',", "# chain_reduce_partial(", "# attrgetter('data'),", "# generate_form_data_validator(in_form_data_parameters, context),", "# )", "# )", "# REQUEST_BODY", "in_request_body_parameters", "=", "filter_parameters", "(", "all_parameters", ",", "in_", "=", "BODY", ")", "validators", ".", "add_validator", "(", "'request_body'", ",", "chain_reduce_partial", "(", "attrgetter", "(", "'data'", ")", ",", "generate_request_body_validator", "(", "in_request_body_parameters", ",", "context", ")", ",", ")", ")", "return", "generate_object_validator", "(", "field_validators", "=", "validators", ")" ]
Generates a validator function to validate. - request.path against the path parameters. - request.query against the query parameters. - request.headers against the header parameters. - TODO: request.body against the body parameters. - TODO: request.formData against any form data.
[ "Generates", "a", "validator", "function", "to", "validate", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/operation.py#L100-L182
train
pipermerriam/flex
flex/decorators.py
partial_safe_wraps
def partial_safe_wraps(wrapped_func, *args, **kwargs): """ A version of `functools.wraps` that is safe to wrap a partial in. """ if isinstance(wrapped_func, functools.partial): return partial_safe_wraps(wrapped_func.func) else: return functools.wraps(wrapped_func)
python
def partial_safe_wraps(wrapped_func, *args, **kwargs): """ A version of `functools.wraps` that is safe to wrap a partial in. """ if isinstance(wrapped_func, functools.partial): return partial_safe_wraps(wrapped_func.func) else: return functools.wraps(wrapped_func)
[ "def", "partial_safe_wraps", "(", "wrapped_func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "isinstance", "(", "wrapped_func", ",", "functools", ".", "partial", ")", ":", "return", "partial_safe_wraps", "(", "wrapped_func", ".", "func", ")", "else", ":", "return", "functools", ".", "wraps", "(", "wrapped_func", ")" ]
A version of `functools.wraps` that is safe to wrap a partial in.
[ "A", "version", "of", "functools", ".", "wraps", "that", "is", "safe", "to", "wrap", "a", "partial", "in", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/decorators.py#L10-L17
train
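A small illustration of why this helper exists, assuming it is importable from flex.decorators as the record's path suggests: functools.partial objects carry no __name__, so wrapping one directly can lose (or, on some Python versions, fail to copy) metadata, whereas partial_safe_wraps recurses down to the underlying function first.

import functools
from flex.decorators import partial_safe_wraps

def multiply(a, b):
    """Multiply two numbers."""
    return a * b

double = functools.partial(multiply, 2)

@partial_safe_wraps(double)
def wrapper(*args, **kwargs):
    return double(*args, **kwargs)

print(wrapper.__name__)   # 'multiply' -- metadata is taken from partial.func
print(wrapper.__doc__)    # 'Multiply two numbers.'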
pipermerriam/flex
flex/decorators.py
skip_if_empty
def skip_if_empty(func): """ Decorator for validation functions which makes them pass if the value passed in is the EMPTY sentinel value. """ @partial_safe_wraps(func) def inner(value, *args, **kwargs): if value is EMPTY: return else: return func(value, *args, **kwargs) return inner
python
def skip_if_empty(func): """ Decorator for validation functions which makes them pass if the value passed in is the EMPTY sentinel value. """ @partial_safe_wraps(func) def inner(value, *args, **kwargs): if value is EMPTY: return else: return func(value, *args, **kwargs) return inner
[ "def", "skip_if_empty", "(", "func", ")", ":", "@", "partial_safe_wraps", "(", "func", ")", "def", "inner", "(", "value", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "value", "is", "EMPTY", ":", "return", "else", ":", "return", "func", "(", "value", ",", "*", "args", ",", "*", "*", "kwargs", ")", "return", "inner" ]
Decorator for validation functions which makes them pass if the value passed in is the EMPTY sentinel value.
[ "Decorator", "for", "validation", "functions", "which", "makes", "them", "pass", "if", "the", "value", "passed", "in", "is", "the", "EMPTY", "sentinel", "value", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/decorators.py#L42-L53
train
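A hedged usage sketch. EMPTY is flex's sentinel for "no value was provided"; the import path flex.constants is an assumption (the decorator's own module only imports the name), so treat it as illustrative.

from flex.constants import EMPTY
from flex.decorators import skip_if_empty

@skip_if_empty
def require_positive(value):
    if value <= 0:
        raise ValueError("value must be positive")

require_positive(5)       # the validator body runs normally
require_positive(EMPTY)   # short-circuits: returns None without calling the body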
pipermerriam/flex
flex/decorators.py
rewrite_reserved_words
def rewrite_reserved_words(func): """ Given a function whose kwargs need to contain a reserved word such as `in`, allow calling that function with the keyword as `in_`, such that function kwargs are rewritten to use the reserved word. """ @partial_safe_wraps(func) def inner(*args, **kwargs): for word in RESERVED_WORDS: key = "{0}_".format(word) if key in kwargs: kwargs[word] = kwargs.pop(key) return func(*args, **kwargs) return inner
python
def rewrite_reserved_words(func): """ Given a function whose kwargs need to contain a reserved word such as `in`, allow calling that function with the keyword as `in_`, such that function kwargs are rewritten to use the reserved word. """ @partial_safe_wraps(func) def inner(*args, **kwargs): for word in RESERVED_WORDS: key = "{0}_".format(word) if key in kwargs: kwargs[word] = kwargs.pop(key) return func(*args, **kwargs) return inner
[ "def", "rewrite_reserved_words", "(", "func", ")", ":", "@", "partial_safe_wraps", "(", "func", ")", "def", "inner", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "for", "word", "in", "RESERVED_WORDS", ":", "key", "=", "\"{0}_\"", ".", "format", "(", "word", ")", "if", "key", "in", "kwargs", ":", "kwargs", "[", "word", "]", "=", "kwargs", ".", "pop", "(", "key", ")", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "inner" ]
Given a function whose kwargs need to contain a reserved word such as `in`, allow calling that function with the keyword as `in_`, such that function kwargs are rewritten to use the reserved word.
[ "Given", "a", "function", "whose", "kwargs", "need", "to", "contain", "a", "reserved", "word", "such", "as", "in", "allow", "calling", "that", "function", "with", "the", "keyword", "as", "in_", "such", "that", "function", "kwargs", "are", "rewritten", "to", "use", "the", "reserved", "word", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/decorators.py#L74-L87
train
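An illustration of the keyword rewriting, assuming RESERVED_WORDS includes in (the docstring names it) and that the decorator is importable from flex.decorators per the record's path.

from flex.decorators import rewrite_reserved_words

@rewrite_reserved_words
def describe(name, **kwargs):
    # Inside the function the reserved spelling is available again.
    return "{0} is passed in: {1}".format(name, kwargs["in"])

# Callers use the trailing-underscore spelling, which is valid Python syntax.
print(describe("petId", in_="path"))   # -> petId is passed in: path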
pipermerriam/flex
flex/validation/utils.py
any_validator
def any_validator(obj, validators, **kwargs): """ Attempt multiple validators on an object. - If any pass, then all validation passes. - Otherwise, raise all of the errors. """ if not len(validators) > 1: raise ValueError( "any_validator requires at least 2 validator. Only got " "{0}".format(len(validators)) ) errors = ErrorDict() for key, validator in validators.items(): try: validator(obj, **kwargs) except ValidationError as err: errors[key] = err.detail else: break else: if len(errors) == 1: # Special case for a single error. Just raise it as if it was the # only validator run. error = errors.values()[0] raise ValidationError(error) else: # Raise all of the errors with the key namespaces. errors.raise_()
python
def any_validator(obj, validators, **kwargs): """ Attempt multiple validators on an object. - If any pass, then all validation passes. - Otherwise, raise all of the errors. """ if not len(validators) > 1: raise ValueError( "any_validator requires at least 2 validator. Only got " "{0}".format(len(validators)) ) errors = ErrorDict() for key, validator in validators.items(): try: validator(obj, **kwargs) except ValidationError as err: errors[key] = err.detail else: break else: if len(errors) == 1: # Special case for a single error. Just raise it as if it was the # only validator run. error = errors.values()[0] raise ValidationError(error) else: # Raise all of the errors with the key namespaces. errors.raise_()
[ "def", "any_validator", "(", "obj", ",", "validators", ",", "*", "*", "kwargs", ")", ":", "if", "not", "len", "(", "validators", ")", ">", "1", ":", "raise", "ValueError", "(", "\"any_validator requires at least 2 validator. Only got \"", "\"{0}\"", ".", "format", "(", "len", "(", "validators", ")", ")", ")", "errors", "=", "ErrorDict", "(", ")", "for", "key", ",", "validator", "in", "validators", ".", "items", "(", ")", ":", "try", ":", "validator", "(", "obj", ",", "*", "*", "kwargs", ")", "except", "ValidationError", "as", "err", ":", "errors", "[", "key", "]", "=", "err", ".", "detail", "else", ":", "break", "else", ":", "if", "len", "(", "errors", ")", "==", "1", ":", "# Special case for a single error. Just raise it as if it was the", "# only validator run.", "error", "=", "errors", ".", "values", "(", ")", "[", "0", "]", "raise", "ValidationError", "(", "error", ")", "else", ":", "# Raise all of the errors with the key namespaces.", "errors", ".", "raise_", "(", ")" ]
Attempt multiple validators on an object. - If any pass, then all validation passes. - Otherwise, raise all of the errors.
[ "Attempt", "multiple", "validators", "on", "an", "object", "." ]
233f8149fb851a6255753bcec948cb6fefb2723b
https://github.com/pipermerriam/flex/blob/233f8149fb851a6255753bcec948cb6fefb2723b/flex/validation/utils.py#L9-L37
train
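A sketch of the calling convention visible in the code above: validators maps a label to a callable, at least two entries are required, and any single success is enough. Two assumptions to flag: the ValidationError import path (flex.exceptions in the versions I have seen) and that the error type exposes the .detail attribute the implementation reads.

from flex.exceptions import ValidationError
from flex.validation.utils import any_validator

def is_int(value, **kwargs):
    if not isinstance(value, int):
        raise ValidationError("not an integer")

def is_str(value, **kwargs):
    if not isinstance(value, str):
        raise ValidationError("not a string")

checks = {"int": is_int, "str": is_str}
any_validator(42, validators=checks)    # passes: is_int succeeds
any_validator("x", validators=checks)   # passes: is_str succeeds
# any_validator(1.5, validators=checks) would raise, carrying both error messages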
toabctl/metaextract
metaextract/utils.py
_extract_to_tempdir
def _extract_to_tempdir(archive_filename): """extract the given tarball or zipfile to a tempdir and change the cwd to the new tempdir. Delete the tempdir at the end""" if not os.path.exists(archive_filename): raise Exception("Archive '%s' does not exist" % (archive_filename)) tempdir = tempfile.mkdtemp(prefix="metaextract_") current_cwd = os.getcwd() try: if tarfile.is_tarfile(archive_filename): with tarfile.open(archive_filename) as f: f.extractall(tempdir) elif zipfile.is_zipfile(archive_filename): with zipfile.ZipFile(archive_filename) as f: f.extractall(tempdir) else: raise Exception("Can not extract '%s'. " "Not a tar or zip file" % archive_filename) os.chdir(tempdir) yield tempdir finally: os.chdir(current_cwd) shutil.rmtree(tempdir)
python
def _extract_to_tempdir(archive_filename): """extract the given tarball or zipfile to a tempdir and change the cwd to the new tempdir. Delete the tempdir at the end""" if not os.path.exists(archive_filename): raise Exception("Archive '%s' does not exist" % (archive_filename)) tempdir = tempfile.mkdtemp(prefix="metaextract_") current_cwd = os.getcwd() try: if tarfile.is_tarfile(archive_filename): with tarfile.open(archive_filename) as f: f.extractall(tempdir) elif zipfile.is_zipfile(archive_filename): with zipfile.ZipFile(archive_filename) as f: f.extractall(tempdir) else: raise Exception("Can not extract '%s'. " "Not a tar or zip file" % archive_filename) os.chdir(tempdir) yield tempdir finally: os.chdir(current_cwd) shutil.rmtree(tempdir)
[ "def", "_extract_to_tempdir", "(", "archive_filename", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "archive_filename", ")", ":", "raise", "Exception", "(", "\"Archive '%s' does not exist\"", "%", "(", "archive_filename", ")", ")", "tempdir", "=", "tempfile", ".", "mkdtemp", "(", "prefix", "=", "\"metaextract_\"", ")", "current_cwd", "=", "os", ".", "getcwd", "(", ")", "try", ":", "if", "tarfile", ".", "is_tarfile", "(", "archive_filename", ")", ":", "with", "tarfile", ".", "open", "(", "archive_filename", ")", "as", "f", ":", "f", ".", "extractall", "(", "tempdir", ")", "elif", "zipfile", ".", "is_zipfile", "(", "archive_filename", ")", ":", "with", "zipfile", ".", "ZipFile", "(", "archive_filename", ")", "as", "f", ":", "f", ".", "extractall", "(", "tempdir", ")", "else", ":", "raise", "Exception", "(", "\"Can not extract '%s'. \"", "\"Not a tar or zip file\"", "%", "archive_filename", ")", "os", ".", "chdir", "(", "tempdir", ")", "yield", "tempdir", "finally", ":", "os", ".", "chdir", "(", "current_cwd", ")", "shutil", ".", "rmtree", "(", "tempdir", ")" ]
extract the given tarball or zipfile to a tempdir and change the cwd to the new tempdir. Delete the tempdir at the end
[ "extract", "the", "given", "tarball", "or", "zipfile", "to", "a", "tempdir", "and", "change", "the", "cwd", "to", "the", "new", "tempdir", ".", "Delete", "the", "tempdir", "at", "the", "end" ]
0515490b5983d888bbbaec5fdb5a0a4214743335
https://github.com/toabctl/metaextract/blob/0515490b5983d888bbbaec5fdb5a0a4214743335/metaextract/utils.py#L37-L59
train
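Worth noting: the function is a generator and is consumed with a with-statement (see from_archive further down), so in the repository it is wrapped by contextlib.contextmanager; the decorator simply is not part of the extracted string. A hedged usage sketch with an illustrative archive name:

from metaextract.utils import _extract_to_tempdir

with _extract_to_tempdir("some-package-1.0.tar.gz") as tempdir:
    # the cwd is now the freshly created extraction directory
    print(tempdir)
# on exit the previous cwd is restored and the tempdir is deleted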
toabctl/metaextract
metaextract/utils.py
_enter_single_subdir
def _enter_single_subdir(root_dir): """if the given directory has just a single subdir, enter that""" current_cwd = os.getcwd() try: dest_dir = root_dir dir_list = os.listdir(root_dir) if len(dir_list) == 1: first = os.path.join(root_dir, dir_list[0]) if os.path.isdir(first): dest_dir = first else: dest_dir = root_dir os.chdir(dest_dir) yield dest_dir finally: os.chdir(current_cwd)
python
def _enter_single_subdir(root_dir): """if the given directory has just a single subdir, enter that""" current_cwd = os.getcwd() try: dest_dir = root_dir dir_list = os.listdir(root_dir) if len(dir_list) == 1: first = os.path.join(root_dir, dir_list[0]) if os.path.isdir(first): dest_dir = first else: dest_dir = root_dir os.chdir(dest_dir) yield dest_dir finally: os.chdir(current_cwd)
[ "def", "_enter_single_subdir", "(", "root_dir", ")", ":", "current_cwd", "=", "os", ".", "getcwd", "(", ")", "try", ":", "dest_dir", "=", "root_dir", "dir_list", "=", "os", ".", "listdir", "(", "root_dir", ")", "if", "len", "(", "dir_list", ")", "==", "1", ":", "first", "=", "os", ".", "path", ".", "join", "(", "root_dir", ",", "dir_list", "[", "0", "]", ")", "if", "os", ".", "path", ".", "isdir", "(", "first", ")", ":", "dest_dir", "=", "first", "else", ":", "dest_dir", "=", "root_dir", "os", ".", "chdir", "(", "dest_dir", ")", "yield", "dest_dir", "finally", ":", "os", ".", "chdir", "(", "current_cwd", ")" ]
if the given directory has just a single subdir, enter that
[ "if", "the", "given", "directory", "has", "just", "a", "single", "subdir", "enter", "that" ]
0515490b5983d888bbbaec5fdb5a0a4214743335
https://github.com/toabctl/metaextract/blob/0515490b5983d888bbbaec5fdb5a0a4214743335/metaextract/utils.py#L63-L78
train
toabctl/metaextract
metaextract/utils.py
_set_file_encoding_utf8
def _set_file_encoding_utf8(filename): """set an encoding header as suggested in PEP-0263. This is not entirely correct because we don't know the encoding of the given file but it's at least a chance to get metadata from the setup.py""" with open(filename, 'r+') as f: content = f.read() f.seek(0, 0) f.write("# -*- coding: utf-8 -*-\n" + content)
python
def _set_file_encoding_utf8(filename): """set an encoding header as suggested in PEP-0263. This is not entirely correct because we don't know the encoding of the given file but it's at least a chance to get metadata from the setup.py""" with open(filename, 'r+') as f: content = f.read() f.seek(0, 0) f.write("# -*- coding: utf-8 -*-\n" + content)
[ "def", "_set_file_encoding_utf8", "(", "filename", ")", ":", "with", "open", "(", "filename", ",", "'r+'", ")", "as", "f", ":", "content", "=", "f", ".", "read", "(", ")", "f", ".", "seek", "(", "0", ",", "0", ")", "f", ".", "write", "(", "\"# -*- coding: utf-8 -*-\\n\"", "+", "content", ")" ]
set an encoding header as suggested in PEP-0263. This is not entirely correct because we don't know the encoding of the given file but it's at least a chance to get metadata from the setup.py
[ "set", "an", "encoding", "header", "as", "suggested", "in", "PEP", "-", "0263", ".", "This", "is", "not", "entirely", "correct", "because", "we", "don", "t", "know", "the", "encoding", "of", "the", "given", "file", "but", "it", "s", "at", "least", "a", "chance", "to", "get", "metadata", "from", "the", "setup", ".", "py" ]
0515490b5983d888bbbaec5fdb5a0a4214743335
https://github.com/toabctl/metaextract/blob/0515490b5983d888bbbaec5fdb5a0a4214743335/metaextract/utils.py#L81-L88
train
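A tiny self-contained demonstration of the effect on a throwaway file:

from metaextract.utils import _set_file_encoding_utf8

with open("demo_setup.py", "w") as f:
    f.write("print('hello')\n")

_set_file_encoding_utf8("demo_setup.py")

with open("demo_setup.py") as f:
    print(f.readline().rstrip())   # -> # -*- coding: utf-8 -*-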
toabctl/metaextract
metaextract/utils.py
_setup_py_run_from_dir
def _setup_py_run_from_dir(root_dir, py_interpreter): """run the extractmeta command via the setup.py in the given root_dir. the output of extractmeta is json and is stored in a tempfile which is then read in and returned as data""" data = {} with _enter_single_subdir(root_dir) as single_subdir: if not os.path.exists("setup.py"): raise Exception("'setup.py' does not exist in '%s'" % ( single_subdir)) # generate a temporary json file which contains the metadata output_json = tempfile.NamedTemporaryFile() cmd = "%s setup.py -q --command-packages metaextract " \ "metaextract -o %s " % (py_interpreter, output_json.name) try: subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) except subprocess.CalledProcessError: # try again with a encoding in setup.py _set_file_encoding_utf8("setup.py") subprocess.check_output(cmd, shell=True) # read json file and return data with open(output_json.name, "r") as f: data = json.loads(f.read()) # sort some of the keys if the dict values are lists for key in ['data_files', 'entry_points', 'extras_require', 'install_requires', 'setup_requires', 'scripts', 'tests_require', 'tests_suite']: if key in data['data'] and isinstance(data['data'][key], list): data['data'][key] = sorted(data['data'][key]) return data
python
def _setup_py_run_from_dir(root_dir, py_interpreter): """run the extractmeta command via the setup.py in the given root_dir. the output of extractmeta is json and is stored in a tempfile which is then read in and returned as data""" data = {} with _enter_single_subdir(root_dir) as single_subdir: if not os.path.exists("setup.py"): raise Exception("'setup.py' does not exist in '%s'" % ( single_subdir)) # generate a temporary json file which contains the metadata output_json = tempfile.NamedTemporaryFile() cmd = "%s setup.py -q --command-packages metaextract " \ "metaextract -o %s " % (py_interpreter, output_json.name) try: subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) except subprocess.CalledProcessError: # try again with a encoding in setup.py _set_file_encoding_utf8("setup.py") subprocess.check_output(cmd, shell=True) # read json file and return data with open(output_json.name, "r") as f: data = json.loads(f.read()) # sort some of the keys if the dict values are lists for key in ['data_files', 'entry_points', 'extras_require', 'install_requires', 'setup_requires', 'scripts', 'tests_require', 'tests_suite']: if key in data['data'] and isinstance(data['data'][key], list): data['data'][key] = sorted(data['data'][key]) return data
[ "def", "_setup_py_run_from_dir", "(", "root_dir", ",", "py_interpreter", ")", ":", "data", "=", "{", "}", "with", "_enter_single_subdir", "(", "root_dir", ")", "as", "single_subdir", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "\"setup.py\"", ")", ":", "raise", "Exception", "(", "\"'setup.py' does not exist in '%s'\"", "%", "(", "single_subdir", ")", ")", "# generate a temporary json file which contains the metadata", "output_json", "=", "tempfile", ".", "NamedTemporaryFile", "(", ")", "cmd", "=", "\"%s setup.py -q --command-packages metaextract \"", "\"metaextract -o %s \"", "%", "(", "py_interpreter", ",", "output_json", ".", "name", ")", "try", ":", "subprocess", ".", "check_output", "(", "cmd", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "shell", "=", "True", ")", "except", "subprocess", ".", "CalledProcessError", ":", "# try again with a encoding in setup.py", "_set_file_encoding_utf8", "(", "\"setup.py\"", ")", "subprocess", ".", "check_output", "(", "cmd", ",", "shell", "=", "True", ")", "# read json file and return data", "with", "open", "(", "output_json", ".", "name", ",", "\"r\"", ")", "as", "f", ":", "data", "=", "json", ".", "loads", "(", "f", ".", "read", "(", ")", ")", "# sort some of the keys if the dict values are lists", "for", "key", "in", "[", "'data_files'", ",", "'entry_points'", ",", "'extras_require'", ",", "'install_requires'", ",", "'setup_requires'", ",", "'scripts'", ",", "'tests_require'", ",", "'tests_suite'", "]", ":", "if", "key", "in", "data", "[", "'data'", "]", "and", "isinstance", "(", "data", "[", "'data'", "]", "[", "key", "]", ",", "list", ")", ":", "data", "[", "'data'", "]", "[", "key", "]", "=", "sorted", "(", "data", "[", "'data'", "]", "[", "key", "]", ")", "return", "data" ]
run the extractmeta command via the setup.py in the given root_dir. the output of extractmeta is json and is stored in a tempfile which is then read in and returned as data
[ "run", "the", "extractmeta", "command", "via", "the", "setup", ".", "py", "in", "the", "given", "root_dir", ".", "the", "output", "of", "extractmeta", "is", "json", "and", "is", "stored", "in", "a", "tempfile", "which", "is", "then", "read", "in", "and", "returned", "as", "data" ]
0515490b5983d888bbbaec5fdb5a0a4214743335
https://github.com/toabctl/metaextract/blob/0515490b5983d888bbbaec5fdb5a0a4214743335/metaextract/utils.py#L91-L121
train
toabctl/metaextract
metaextract/utils.py
from_archive
def from_archive(archive_filename, py_interpreter=sys.executable): """extract metadata from a given sdist archive file :param archive_filename: a sdist archive file :param py_interpreter: The full path to the used python interpreter :returns: a json blob with metadata """ with _extract_to_tempdir(archive_filename) as root_dir: data = _setup_py_run_from_dir(root_dir, py_interpreter) return data
python
def from_archive(archive_filename, py_interpreter=sys.executable): """extract metadata from a given sdist archive file :param archive_filename: a sdist archive file :param py_interpreter: The full path to the used python interpreter :returns: a json blob with metadata """ with _extract_to_tempdir(archive_filename) as root_dir: data = _setup_py_run_from_dir(root_dir, py_interpreter) return data
[ "def", "from_archive", "(", "archive_filename", ",", "py_interpreter", "=", "sys", ".", "executable", ")", ":", "with", "_extract_to_tempdir", "(", "archive_filename", ")", "as", "root_dir", ":", "data", "=", "_setup_py_run_from_dir", "(", "root_dir", ",", "py_interpreter", ")", "return", "data" ]
extract metadata from a given sdist archive file :param archive_filename: a sdist archive file :param py_interpreter: The full path to the used python interpreter :returns: a json blob with metadata
[ "extract", "metadata", "from", "a", "given", "sdist", "archive", "file" ]
0515490b5983d888bbbaec5fdb5a0a4214743335
https://github.com/toabctl/metaextract/blob/0515490b5983d888bbbaec5fdb5a0a4214743335/metaextract/utils.py#L125-L135
train
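This is the public entry point the helpers above serve. A usage sketch (the archive name is illustrative; the returned dict has a top-level "data" key, as _setup_py_run_from_dir shows):

from metaextract import utils as meta_utils

result = meta_utils.from_archive("some-package-1.0.tar.gz")
print(result["data"].get("install_requires"))
print(result["data"].get("entry_points"))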
rwl/PyCIM
PyCIM/RDFXMLReader.py
xmlns
def xmlns(source): """ Returns a map of prefix to namespace for the given XML file. """ namespaces = {} events=("end", "start-ns", "end-ns") for (event, elem) in iterparse(source, events): if event == "start-ns": prefix, ns = elem namespaces[prefix] = ns elif event == "end": break # Reset stream if hasattr(source, "seek"): source.seek(0) return namespaces
python
def xmlns(source): """ Returns a map of prefix to namespace for the given XML file. """ namespaces = {} events=("end", "start-ns", "end-ns") for (event, elem) in iterparse(source, events): if event == "start-ns": prefix, ns = elem namespaces[prefix] = ns elif event == "end": break # Reset stream if hasattr(source, "seek"): source.seek(0) return namespaces
[ "def", "xmlns", "(", "source", ")", ":", "namespaces", "=", "{", "}", "events", "=", "(", "\"end\"", ",", "\"start-ns\"", ",", "\"end-ns\"", ")", "for", "(", "event", ",", "elem", ")", "in", "iterparse", "(", "source", ",", "events", ")", ":", "if", "event", "==", "\"start-ns\"", ":", "prefix", ",", "ns", "=", "elem", "namespaces", "[", "prefix", "]", "=", "ns", "elif", "event", "==", "\"end\"", ":", "break", "# Reset stream", "if", "hasattr", "(", "source", ",", "\"seek\"", ")", ":", "source", ".", "seek", "(", "0", ")", "return", "namespaces" ]
Returns a map of prefix to namespace for the given XML file.
[ "Returns", "a", "map", "of", "prefix", "to", "namespace", "for", "the", "given", "XML", "file", "." ]
4a12ebb5a7fb03c7790d396910daef9b97c4ef99
https://github.com/rwl/PyCIM/blob/4a12ebb5a7fb03c7790d396910daef9b97c4ef99/PyCIM/RDFXMLReader.py#L226-L244
train
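A quick check with an in-memory document; io.BytesIO is enough because the function only needs something iterparse can read and, optionally, seek:

import io
from PyCIM.RDFXMLReader import xmlns

doc = io.BytesIO(
    b'<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" '
    b'xmlns:cim="http://iec.ch/TC57/2009/CIM-schema-cim14#"/>'
)
print(xmlns(doc))
# {'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#',
#  'cim': 'http://iec.ch/TC57/2009/CIM-schema-cim14#'}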
adafruit/Adafruit_Python_PN532
examples/mcpi_listen.py
create_block
def create_block(mc, block_id, subtype=None): """Build a block with the specified id and subtype under the player in the Minecraft world. Subtype is optional and can be specified as None to use the default subtype for the block. """ # Get player tile position and real position. ptx, pty, ptz = mc.player.getTilePos() px, py, pz = mc.player.getPos() # Create block at current player tile location. if subtype is None: mc.setBlock(ptx, pty, ptz, block_id) else: mc.setBlock(ptx, pty, ptz, block_id, subtype) # Move the player's real positon up one block. mc.player.setPos(px, py+1, pz)
python
def create_block(mc, block_id, subtype=None): """Build a block with the specified id and subtype under the player in the Minecraft world. Subtype is optional and can be specified as None to use the default subtype for the block. """ # Get player tile position and real position. ptx, pty, ptz = mc.player.getTilePos() px, py, pz = mc.player.getPos() # Create block at current player tile location. if subtype is None: mc.setBlock(ptx, pty, ptz, block_id) else: mc.setBlock(ptx, pty, ptz, block_id, subtype) # Move the player's real positon up one block. mc.player.setPos(px, py+1, pz)
[ "def", "create_block", "(", "mc", ",", "block_id", ",", "subtype", "=", "None", ")", ":", "# Get player tile position and real position.", "ptx", ",", "pty", ",", "ptz", "=", "mc", ".", "player", ".", "getTilePos", "(", ")", "px", ",", "py", ",", "pz", "=", "mc", ".", "player", ".", "getPos", "(", ")", "# Create block at current player tile location.", "if", "subtype", "is", "None", ":", "mc", ".", "setBlock", "(", "ptx", ",", "pty", ",", "ptz", ",", "block_id", ")", "else", ":", "mc", ".", "setBlock", "(", "ptx", ",", "pty", ",", "ptz", ",", "block_id", ",", "subtype", ")", "# Move the player's real positon up one block.", "mc", ".", "player", ".", "setPos", "(", "px", ",", "py", "+", "1", ",", "pz", ")" ]
Build a block with the specified id and subtype under the player in the Minecraft world. Subtype is optional and can be specified as None to use the default subtype for the block.
[ "Build", "a", "block", "with", "the", "specified", "id", "and", "subtype", "under", "the", "player", "in", "the", "Minecraft", "world", ".", "Subtype", "is", "optional", "and", "can", "be", "specified", "as", "None", "to", "use", "the", "default", "subtype", "for", "the", "block", "." ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/examples/mcpi_listen.py#L48-L62
train
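A hedged usage sketch, assuming it runs alongside the create_block helper above with a Minecraft Pi instance already running; the block ids are ordinary Minecraft ids and purely illustrative:

import mcpi.minecraft as minecraft

mc = minecraft.Minecraft.create()   # connect to the running game
create_block(mc, 57)                # e.g. a diamond block under the player
create_block(mc, 35, subtype=14)    # e.g. wool (35) with a red (14) data value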
adafruit/Adafruit_Python_PN532
Adafruit_PN532/PN532.py
PN532._busy_wait_ms
def _busy_wait_ms(self, ms): """Busy wait for the specified number of milliseconds.""" start = time.time() delta = ms/1000.0 while (time.time() - start) <= delta: pass
python
def _busy_wait_ms(self, ms): """Busy wait for the specified number of milliseconds.""" start = time.time() delta = ms/1000.0 while (time.time() - start) <= delta: pass
[ "def", "_busy_wait_ms", "(", "self", ",", "ms", ")", ":", "start", "=", "time", ".", "time", "(", ")", "delta", "=", "ms", "/", "1000.0", "while", "(", "time", ".", "time", "(", ")", "-", "start", ")", "<=", "delta", ":", "pass" ]
Busy wait for the specified number of milliseconds.
[ "Busy", "wait", "for", "the", "specified", "number", "of", "milliseconds", "." ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/Adafruit_PN532/PN532.py#L191-L196
train
adafruit/Adafruit_Python_PN532
Adafruit_PN532/PN532.py
PN532._write_frame
def _write_frame(self, data): """Write a frame to the PN532 with the specified data bytearray.""" assert data is not None and 0 < len(data) < 255, 'Data must be array of 1 to 255 bytes.' # Build frame to send as: # - SPI data write (0x01) # - Preamble (0x00) # - Start code (0x00, 0xFF) # - Command length (1 byte) # - Command length checksum # - Command bytes # - Checksum # - Postamble (0x00) length = len(data) frame = bytearray(length+8) frame[0] = PN532_SPI_DATAWRITE frame[1] = PN532_PREAMBLE frame[2] = PN532_STARTCODE1 frame[3] = PN532_STARTCODE2 frame[4] = length & 0xFF frame[5] = self._uint8_add(~length, 1) frame[6:-2] = data checksum = reduce(self._uint8_add, data, 0xFF) frame[-2] = ~checksum & 0xFF frame[-1] = PN532_POSTAMBLE # Send frame. logger.debug('Write frame: 0x{0}'.format(binascii.hexlify(frame))) self._gpio.set_low(self._cs) self._busy_wait_ms(2) self._spi.write(frame) self._gpio.set_high(self._cs)
python
def _write_frame(self, data): """Write a frame to the PN532 with the specified data bytearray.""" assert data is not None and 0 < len(data) < 255, 'Data must be array of 1 to 255 bytes.' # Build frame to send as: # - SPI data write (0x01) # - Preamble (0x00) # - Start code (0x00, 0xFF) # - Command length (1 byte) # - Command length checksum # - Command bytes # - Checksum # - Postamble (0x00) length = len(data) frame = bytearray(length+8) frame[0] = PN532_SPI_DATAWRITE frame[1] = PN532_PREAMBLE frame[2] = PN532_STARTCODE1 frame[3] = PN532_STARTCODE2 frame[4] = length & 0xFF frame[5] = self._uint8_add(~length, 1) frame[6:-2] = data checksum = reduce(self._uint8_add, data, 0xFF) frame[-2] = ~checksum & 0xFF frame[-1] = PN532_POSTAMBLE # Send frame. logger.debug('Write frame: 0x{0}'.format(binascii.hexlify(frame))) self._gpio.set_low(self._cs) self._busy_wait_ms(2) self._spi.write(frame) self._gpio.set_high(self._cs)
[ "def", "_write_frame", "(", "self", ",", "data", ")", ":", "assert", "data", "is", "not", "None", "and", "0", "<", "len", "(", "data", ")", "<", "255", ",", "'Data must be array of 1 to 255 bytes.'", "# Build frame to send as:", "# - SPI data write (0x01)", "# - Preamble (0x00)", "# - Start code (0x00, 0xFF)", "# - Command length (1 byte)", "# - Command length checksum", "# - Command bytes", "# - Checksum", "# - Postamble (0x00)", "length", "=", "len", "(", "data", ")", "frame", "=", "bytearray", "(", "length", "+", "8", ")", "frame", "[", "0", "]", "=", "PN532_SPI_DATAWRITE", "frame", "[", "1", "]", "=", "PN532_PREAMBLE", "frame", "[", "2", "]", "=", "PN532_STARTCODE1", "frame", "[", "3", "]", "=", "PN532_STARTCODE2", "frame", "[", "4", "]", "=", "length", "&", "0xFF", "frame", "[", "5", "]", "=", "self", ".", "_uint8_add", "(", "~", "length", ",", "1", ")", "frame", "[", "6", ":", "-", "2", "]", "=", "data", "checksum", "=", "reduce", "(", "self", ".", "_uint8_add", ",", "data", ",", "0xFF", ")", "frame", "[", "-", "2", "]", "=", "~", "checksum", "&", "0xFF", "frame", "[", "-", "1", "]", "=", "PN532_POSTAMBLE", "# Send frame.", "logger", ".", "debug", "(", "'Write frame: 0x{0}'", ".", "format", "(", "binascii", ".", "hexlify", "(", "frame", ")", ")", ")", "self", ".", "_gpio", ".", "set_low", "(", "self", ".", "_cs", ")", "self", ".", "_busy_wait_ms", "(", "2", ")", "self", ".", "_spi", ".", "write", "(", "frame", ")", "self", ".", "_gpio", ".", "set_high", "(", "self", ".", "_cs", ")" ]
Write a frame to the PN532 with the specified data bytearray.
[ "Write", "a", "frame", "to", "the", "PN532", "with", "the", "specified", "data", "bytearray", "." ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/Adafruit_PN532/PN532.py#L198-L227
train
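A worked example of the frame layout described in the comments, using the two-byte payload [0xD4, 0x02] (the host-to-PN532 direction byte plus GetFirmwareVersion). The lambda below is an assumed stand-in for _uint8_add, i.e. an add modulo 256, which is what its usage implies.

from functools import reduce

uint8_add = lambda a, b: (a + b) & 0xFF           # assumed equivalent of _uint8_add

data = [0xD4, 0x02]                               # TFI + GetFirmwareVersion
lcs = uint8_add(~len(data), 1)                    # 0xFE, so LEN + LCS == 0 (mod 256)
dcs = ~reduce(uint8_add, data, 0xFF) & 0xFF       # 0x2A, so sum(data) + DCS == 0 (mod 256)

frame = [0x01, 0x00, 0x00, 0xFF, len(data), lcs] + data + [dcs, 0x00]
print([hex(b) for b in frame])
# ['0x1', '0x0', '0x0', '0xff', '0x2', '0xfe', '0xd4', '0x2', '0x2a', '0x0']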
adafruit/Adafruit_Python_PN532
Adafruit_PN532/PN532.py
PN532._read_data
def _read_data(self, count): """Read a specified count of bytes from the PN532.""" # Build a read request frame. frame = bytearray(count) frame[0] = PN532_SPI_DATAREAD # Send the frame and return the response, ignoring the SPI header byte. self._gpio.set_low(self._cs) self._busy_wait_ms(2) response = self._spi.transfer(frame) self._gpio.set_high(self._cs) return response
python
def _read_data(self, count): """Read a specified count of bytes from the PN532.""" # Build a read request frame. frame = bytearray(count) frame[0] = PN532_SPI_DATAREAD # Send the frame and return the response, ignoring the SPI header byte. self._gpio.set_low(self._cs) self._busy_wait_ms(2) response = self._spi.transfer(frame) self._gpio.set_high(self._cs) return response
[ "def", "_read_data", "(", "self", ",", "count", ")", ":", "# Build a read request frame.", "frame", "=", "bytearray", "(", "count", ")", "frame", "[", "0", "]", "=", "PN532_SPI_DATAREAD", "# Send the frame and return the response, ignoring the SPI header byte.", "self", ".", "_gpio", ".", "set_low", "(", "self", ".", "_cs", ")", "self", ".", "_busy_wait_ms", "(", "2", ")", "response", "=", "self", ".", "_spi", ".", "transfer", "(", "frame", ")", "self", ".", "_gpio", ".", "set_high", "(", "self", ".", "_cs", ")", "return", "response" ]
Read a specified count of bytes from the PN532.
[ "Read", "a", "specified", "count", "of", "bytes", "from", "the", "PN532", "." ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/Adafruit_PN532/PN532.py#L229-L239
train
adafruit/Adafruit_Python_PN532
Adafruit_PN532/PN532.py
PN532._read_frame
def _read_frame(self, length): """Read a response frame from the PN532 of at most length bytes in size. Returns the data inside the frame if found, otherwise raises an exception if there is an error parsing the frame. Note that less than length bytes might be returned! """ # Read frame with expected length of data. response = self._read_data(length+8) logger.debug('Read frame: 0x{0}'.format(binascii.hexlify(response))) # Check frame starts with 0x01 and then has 0x00FF (preceeded by optional # zeros). if response[0] != 0x01: raise RuntimeError('Response frame does not start with 0x01!') # Swallow all the 0x00 values that preceed 0xFF. offset = 1 while response[offset] == 0x00: offset += 1 if offset >= len(response): raise RuntimeError('Response frame preamble does not contain 0x00FF!') if response[offset] != 0xFF: raise RuntimeError('Response frame preamble does not contain 0x00FF!') offset += 1 if offset >= len(response): raise RuntimeError('Response contains no data!') # Check length & length checksum match. frame_len = response[offset] if (frame_len + response[offset+1]) & 0xFF != 0: raise RuntimeError('Response length checksum did not match length!') # Check frame checksum value matches bytes. checksum = reduce(self._uint8_add, response[offset+2:offset+2+frame_len+1], 0) if checksum != 0: raise RuntimeError('Response checksum did not match expected value!') # Return frame data. return response[offset+2:offset+2+frame_len]
python
def _read_frame(self, length): """Read a response frame from the PN532 of at most length bytes in size. Returns the data inside the frame if found, otherwise raises an exception if there is an error parsing the frame. Note that less than length bytes might be returned! """ # Read frame with expected length of data. response = self._read_data(length+8) logger.debug('Read frame: 0x{0}'.format(binascii.hexlify(response))) # Check frame starts with 0x01 and then has 0x00FF (preceeded by optional # zeros). if response[0] != 0x01: raise RuntimeError('Response frame does not start with 0x01!') # Swallow all the 0x00 values that preceed 0xFF. offset = 1 while response[offset] == 0x00: offset += 1 if offset >= len(response): raise RuntimeError('Response frame preamble does not contain 0x00FF!') if response[offset] != 0xFF: raise RuntimeError('Response frame preamble does not contain 0x00FF!') offset += 1 if offset >= len(response): raise RuntimeError('Response contains no data!') # Check length & length checksum match. frame_len = response[offset] if (frame_len + response[offset+1]) & 0xFF != 0: raise RuntimeError('Response length checksum did not match length!') # Check frame checksum value matches bytes. checksum = reduce(self._uint8_add, response[offset+2:offset+2+frame_len+1], 0) if checksum != 0: raise RuntimeError('Response checksum did not match expected value!') # Return frame data. return response[offset+2:offset+2+frame_len]
[ "def", "_read_frame", "(", "self", ",", "length", ")", ":", "# Read frame with expected length of data.", "response", "=", "self", ".", "_read_data", "(", "length", "+", "8", ")", "logger", ".", "debug", "(", "'Read frame: 0x{0}'", ".", "format", "(", "binascii", ".", "hexlify", "(", "response", ")", ")", ")", "# Check frame starts with 0x01 and then has 0x00FF (preceeded by optional", "# zeros).", "if", "response", "[", "0", "]", "!=", "0x01", ":", "raise", "RuntimeError", "(", "'Response frame does not start with 0x01!'", ")", "# Swallow all the 0x00 values that preceed 0xFF.", "offset", "=", "1", "while", "response", "[", "offset", "]", "==", "0x00", ":", "offset", "+=", "1", "if", "offset", ">=", "len", "(", "response", ")", ":", "raise", "RuntimeError", "(", "'Response frame preamble does not contain 0x00FF!'", ")", "if", "response", "[", "offset", "]", "!=", "0xFF", ":", "raise", "RuntimeError", "(", "'Response frame preamble does not contain 0x00FF!'", ")", "offset", "+=", "1", "if", "offset", ">=", "len", "(", "response", ")", ":", "raise", "RuntimeError", "(", "'Response contains no data!'", ")", "# Check length & length checksum match.", "frame_len", "=", "response", "[", "offset", "]", "if", "(", "frame_len", "+", "response", "[", "offset", "+", "1", "]", ")", "&", "0xFF", "!=", "0", ":", "raise", "RuntimeError", "(", "'Response length checksum did not match length!'", ")", "# Check frame checksum value matches bytes.", "checksum", "=", "reduce", "(", "self", ".", "_uint8_add", ",", "response", "[", "offset", "+", "2", ":", "offset", "+", "2", "+", "frame_len", "+", "1", "]", ",", "0", ")", "if", "checksum", "!=", "0", ":", "raise", "RuntimeError", "(", "'Response checksum did not match expected value!'", ")", "# Return frame data.", "return", "response", "[", "offset", "+", "2", ":", "offset", "+", "2", "+", "frame_len", "]" ]
Read a response frame from the PN532 of at most length bytes in size. Returns the data inside the frame if found, otherwise raises an exception if there is an error parsing the frame. Note that less than length bytes might be returned!
[ "Read", "a", "response", "frame", "from", "the", "PN532", "of", "at", "most", "length", "bytes", "in", "size", ".", "Returns", "the", "data", "inside", "the", "frame", "if", "found", "otherwise", "raises", "an", "exception", "if", "there", "is", "an", "error", "parsing", "the", "frame", ".", "Note", "that", "less", "than", "length", "bytes", "might", "be", "returned!" ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/Adafruit_PN532/PN532.py#L241-L274
train
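For concreteness, here is a complete response frame as this parser expects it -- the chip's customary reply to GetFirmwareVersion, laid out per the PN532 datasheet (only the checksum arithmetic is actually verified by the code):

raw = bytes([0x01,                      # SPI "ready" byte checked first
             0x00, 0x00, 0xFF,          # preamble + start code
             0x06, 0xFA,                # LEN, LCS (0x06 + 0xFA == 0x100)
             0xD5, 0x03,                # TFI (PN532->host), command echo (0x02 + 1)
             0x32, 0x01, 0x06, 0x07,    # IC, Ver, Rev, Support
             0xE8,                      # DCS (data bytes + DCS == 0x200)
             0x00])                     # postamble
print(sum(raw[6:13]) & 0xFF)            # 0 -> the checksum test in _read_frame passes
# _read_frame would hand back the data slice [0xD5, 0x03, 0x32, 0x01, 0x06, 0x07]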
adafruit/Adafruit_Python_PN532
Adafruit_PN532/PN532.py
PN532._wait_ready
def _wait_ready(self, timeout_sec=1): """Wait until the PN532 is ready to receive commands. At most wait timeout_sec seconds for the PN532 to be ready. If the PN532 is ready before the timeout is exceeded then True will be returned, otherwise False is returned when the timeout is exceeded. """ start = time.time() # Send a SPI status read command and read response. self._gpio.set_low(self._cs) self._busy_wait_ms(2) response = self._spi.transfer([PN532_SPI_STATREAD, 0x00]) self._gpio.set_high(self._cs) # Loop until a ready response is received. while response[1] != PN532_SPI_READY: # Check if the timeout has been exceeded. if time.time() - start >= timeout_sec: return False # Wait a little while and try reading the status again. time.sleep(0.01) self._gpio.set_low(self._cs) self._busy_wait_ms(2) response = self._spi.transfer([PN532_SPI_STATREAD, 0x00]) self._gpio.set_high(self._cs) return True
python
def _wait_ready(self, timeout_sec=1): """Wait until the PN532 is ready to receive commands. At most wait timeout_sec seconds for the PN532 to be ready. If the PN532 is ready before the timeout is exceeded then True will be returned, otherwise False is returned when the timeout is exceeded. """ start = time.time() # Send a SPI status read command and read response. self._gpio.set_low(self._cs) self._busy_wait_ms(2) response = self._spi.transfer([PN532_SPI_STATREAD, 0x00]) self._gpio.set_high(self._cs) # Loop until a ready response is received. while response[1] != PN532_SPI_READY: # Check if the timeout has been exceeded. if time.time() - start >= timeout_sec: return False # Wait a little while and try reading the status again. time.sleep(0.01) self._gpio.set_low(self._cs) self._busy_wait_ms(2) response = self._spi.transfer([PN532_SPI_STATREAD, 0x00]) self._gpio.set_high(self._cs) return True
[ "def", "_wait_ready", "(", "self", ",", "timeout_sec", "=", "1", ")", ":", "start", "=", "time", ".", "time", "(", ")", "# Send a SPI status read command and read response.", "self", ".", "_gpio", ".", "set_low", "(", "self", ".", "_cs", ")", "self", ".", "_busy_wait_ms", "(", "2", ")", "response", "=", "self", ".", "_spi", ".", "transfer", "(", "[", "PN532_SPI_STATREAD", ",", "0x00", "]", ")", "self", ".", "_gpio", ".", "set_high", "(", "self", ".", "_cs", ")", "# Loop until a ready response is received.", "while", "response", "[", "1", "]", "!=", "PN532_SPI_READY", ":", "# Check if the timeout has been exceeded.", "if", "time", ".", "time", "(", ")", "-", "start", ">=", "timeout_sec", ":", "return", "False", "# Wait a little while and try reading the status again.", "time", ".", "sleep", "(", "0.01", ")", "self", ".", "_gpio", ".", "set_low", "(", "self", ".", "_cs", ")", "self", ".", "_busy_wait_ms", "(", "2", ")", "response", "=", "self", ".", "_spi", ".", "transfer", "(", "[", "PN532_SPI_STATREAD", ",", "0x00", "]", ")", "self", ".", "_gpio", ".", "set_high", "(", "self", ".", "_cs", ")", "return", "True" ]
Wait until the PN532 is ready to receive commands. At most wait timeout_sec seconds for the PN532 to be ready. If the PN532 is ready before the timeout is exceeded then True will be returned, otherwise False is returned when the timeout is exceeded.
[ "Wait", "until", "the", "PN532", "is", "ready", "to", "receive", "commands", ".", "At", "most", "wait", "timeout_sec", "seconds", "for", "the", "PN532", "to", "be", "ready", ".", "If", "the", "PN532", "is", "ready", "before", "the", "timeout", "is", "exceeded", "then", "True", "will", "be", "returned", "otherwise", "False", "is", "returned", "when", "the", "timeout", "is", "exceeded", "." ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/Adafruit_PN532/PN532.py#L276-L299
train
adafruit/Adafruit_Python_PN532
Adafruit_PN532/PN532.py
PN532.call_function
def call_function(self, command, response_length=0, params=[], timeout_sec=1): """Send specified command to the PN532 and expect up to response_length bytes back in a response. Note that less than the expected bytes might be returned! Params can optionally specify an array of bytes to send as parameters to the function call. Will wait up to timeout_secs seconds for a response and return a bytearray of response bytes, or None if no response is available within the timeout. """ # Build frame data with command and parameters. data = bytearray(2+len(params)) data[0] = PN532_HOSTTOPN532 data[1] = command & 0xFF data[2:] = params # Send frame and wait for response. self._write_frame(data) if not self._wait_ready(timeout_sec): return None # Verify ACK response and wait to be ready for function response. response = self._read_data(len(PN532_ACK)) if response != PN532_ACK: raise RuntimeError('Did not receive expected ACK from PN532!') if not self._wait_ready(timeout_sec): return None # Read response bytes. response = self._read_frame(response_length+2) # Check that response is for the called function. if not (response[0] == PN532_PN532TOHOST and response[1] == (command+1)): raise RuntimeError('Received unexpected command response!') # Return response data. return response[2:]
python
def call_function(self, command, response_length=0, params=[], timeout_sec=1): """Send specified command to the PN532 and expect up to response_length bytes back in a response. Note that less than the expected bytes might be returned! Params can optionally specify an array of bytes to send as parameters to the function call. Will wait up to timeout_secs seconds for a response and return a bytearray of response bytes, or None if no response is available within the timeout. """ # Build frame data with command and parameters. data = bytearray(2+len(params)) data[0] = PN532_HOSTTOPN532 data[1] = command & 0xFF data[2:] = params # Send frame and wait for response. self._write_frame(data) if not self._wait_ready(timeout_sec): return None # Verify ACK response and wait to be ready for function response. response = self._read_data(len(PN532_ACK)) if response != PN532_ACK: raise RuntimeError('Did not receive expected ACK from PN532!') if not self._wait_ready(timeout_sec): return None # Read response bytes. response = self._read_frame(response_length+2) # Check that response is for the called function. if not (response[0] == PN532_PN532TOHOST and response[1] == (command+1)): raise RuntimeError('Received unexpected command response!') # Return response data. return response[2:]
[ "def", "call_function", "(", "self", ",", "command", ",", "response_length", "=", "0", ",", "params", "=", "[", "]", ",", "timeout_sec", "=", "1", ")", ":", "# Build frame data with command and parameters.", "data", "=", "bytearray", "(", "2", "+", "len", "(", "params", ")", ")", "data", "[", "0", "]", "=", "PN532_HOSTTOPN532", "data", "[", "1", "]", "=", "command", "&", "0xFF", "data", "[", "2", ":", "]", "=", "params", "# Send frame and wait for response.", "self", ".", "_write_frame", "(", "data", ")", "if", "not", "self", ".", "_wait_ready", "(", "timeout_sec", ")", ":", "return", "None", "# Verify ACK response and wait to be ready for function response.", "response", "=", "self", ".", "_read_data", "(", "len", "(", "PN532_ACK", ")", ")", "if", "response", "!=", "PN532_ACK", ":", "raise", "RuntimeError", "(", "'Did not receive expected ACK from PN532!'", ")", "if", "not", "self", ".", "_wait_ready", "(", "timeout_sec", ")", ":", "return", "None", "# Read response bytes.", "response", "=", "self", ".", "_read_frame", "(", "response_length", "+", "2", ")", "# Check that response is for the called function.", "if", "not", "(", "response", "[", "0", "]", "==", "PN532_PN532TOHOST", "and", "response", "[", "1", "]", "==", "(", "command", "+", "1", ")", ")", ":", "raise", "RuntimeError", "(", "'Received unexpected command response!'", ")", "# Return response data.", "return", "response", "[", "2", ":", "]" ]
Send specified command to the PN532 and expect up to response_length bytes back in a response. Note that less than the expected bytes might be returned! Params can optionally specify an array of bytes to send as parameters to the function call. Will wait up to timeout_secs seconds for a response and return a bytearray of response bytes, or None if no response is available within the timeout.
[ "Send", "specified", "command", "to", "the", "PN532", "and", "expect", "up", "to", "response_length", "bytes", "back", "in", "a", "response", ".", "Note", "that", "less", "than", "the", "expected", "bytes", "might", "be", "returned!", "Params", "can", "optionally", "specify", "an", "array", "of", "bytes", "to", "send", "as", "parameters", "to", "the", "function", "call", ".", "Will", "wait", "up", "to", "timeout_secs", "seconds", "for", "a", "response", "and", "return", "a", "bytearray", "of", "response", "bytes", "or", "None", "if", "no", "response", "is", "available", "within", "the", "timeout", "." ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/Adafruit_PN532/PN532.py#L301-L330
train
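A hedged example of driving the chip through this raw interface: 0x14 is the SAMConfiguration command in the PN532 datasheet, and the three parameter bytes (normal mode, a 0x14 x 50 ms timeout, use IRQ) are the commonly used values; pn532 is assumed to be an instance whose begin() has already been called (see the next records).

# Configure the Secure Access Module so the chip can talk to cards.
ack = pn532.call_function(0x14, params=[0x01, 0x14, 0x01])
if ack is None:
    raise RuntimeError('No response to SAMConfiguration within the timeout')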
adafruit/Adafruit_Python_PN532
Adafruit_PN532/PN532.py
PN532.begin
def begin(self): """Initialize communication with the PN532. Must be called before any other calls are made against the PN532. """ # Assert CS pin low for a second for PN532 to be ready. self._gpio.set_low(self._cs) time.sleep(1.0) # Call GetFirmwareVersion to sync up with the PN532. This might not be # required but is done in the Arduino library and kept for consistency. self.get_firmware_version() self._gpio.set_high(self._cs)
python
def begin(self): """Initialize communication with the PN532. Must be called before any other calls are made against the PN532. """ # Assert CS pin low for a second for PN532 to be ready. self._gpio.set_low(self._cs) time.sleep(1.0) # Call GetFirmwareVersion to sync up with the PN532. This might not be # required but is done in the Arduino library and kept for consistency. self.get_firmware_version() self._gpio.set_high(self._cs)
[ "def", "begin", "(", "self", ")", ":", "# Assert CS pin low for a second for PN532 to be ready.", "self", ".", "_gpio", ".", "set_low", "(", "self", ".", "_cs", ")", "time", ".", "sleep", "(", "1.0", ")", "# Call GetFirmwareVersion to sync up with the PN532. This might not be", "# required but is done in the Arduino library and kept for consistency.", "self", ".", "get_firmware_version", "(", ")", "self", ".", "_gpio", ".", "set_high", "(", "self", ".", "_cs", ")" ]
Initialize communication with the PN532. Must be called before any other calls are made against the PN532.
[ "Initialize", "communication", "with", "the", "PN532", ".", "Must", "be", "called", "before", "any", "other", "calls", "are", "made", "against", "the", "PN532", "." ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/Adafruit_PN532/PN532.py#L332-L342
train
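Putting the pieces together, roughly as the repository's example scripts do. The pin numbers are illustrative and depend on wiring, the software-SPI constructor shown is the one those examples use, and SAM_configuration() is the library helper they call before polling (the raw equivalent was sketched above); get_firmware_version and read_passive_target appear in the records that follow.

import binascii
import Adafruit_PN532 as PN532

# BCM pin numbers for one possible Raspberry Pi wiring -- adjust to your setup.
pn532 = PN532.PN532(cs=18, sclk=25, mosi=23, miso=24)
pn532.begin()

ic, ver, rev, support = pn532.get_firmware_version()
print('Found PN5{0:02X} with firmware {1}.{2}'.format(ic, ver, rev))

pn532.SAM_configuration()            # enable card reading
uid = pn532.read_passive_target()    # None if no card showed up within the timeout
if uid is not None:
    print('Card UID: 0x{0}'.format(binascii.hexlify(uid)))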
adafruit/Adafruit_Python_PN532
Adafruit_PN532/PN532.py
PN532.get_firmware_version
def get_firmware_version(self): """Call PN532 GetFirmwareVersion function and return a tuple with the IC, Ver, Rev, and Support values. """ response = self.call_function(PN532_COMMAND_GETFIRMWAREVERSION, 4) if response is None: raise RuntimeError('Failed to detect the PN532! Make sure there is sufficient power (use a 1 amp or greater power supply), the PN532 is wired correctly to the device, and the solder joints on the PN532 headers are solidly connected.') return (response[0], response[1], response[2], response[3])
python
def get_firmware_version(self): """Call PN532 GetFirmwareVersion function and return a tuple with the IC, Ver, Rev, and Support values. """ response = self.call_function(PN532_COMMAND_GETFIRMWAREVERSION, 4) if response is None: raise RuntimeError('Failed to detect the PN532! Make sure there is sufficient power (use a 1 amp or greater power supply), the PN532 is wired correctly to the device, and the solder joints on the PN532 headers are solidly connected.') return (response[0], response[1], response[2], response[3])
[ "def", "get_firmware_version", "(", "self", ")", ":", "response", "=", "self", ".", "call_function", "(", "PN532_COMMAND_GETFIRMWAREVERSION", ",", "4", ")", "if", "response", "is", "None", ":", "raise", "RuntimeError", "(", "'Failed to detect the PN532! Make sure there is sufficient power (use a 1 amp or greater power supply), the PN532 is wired correctly to the device, and the solder joints on the PN532 headers are solidly connected.'", ")", "return", "(", "response", "[", "0", "]", ",", "response", "[", "1", "]", ",", "response", "[", "2", "]", ",", "response", "[", "3", "]", ")" ]
Call PN532 GetFirmwareVersion function and return a tuple with the IC, Ver, Rev, and Support values.
[ "Call", "PN532", "GetFirmwareVersion", "function", "and", "return", "a", "tuple", "with", "the", "IC", "Ver", "Rev", "and", "Support", "values", "." ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/Adafruit_PN532/PN532.py#L344-L351
train
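A minimal usage sketch for the begin/get_firmware_version pair above, assuming this library's software-SPI constructor; the pin numbers are placeholders for whatever GPIO lines the breakout is wired to:

import Adafruit_PN532 as PN532

# Placeholder pin numbers -- match them to your wiring.
pn532 = PN532.PN532(cs=18, sclk=25, mosi=23, miso=24)
pn532.begin()
ic, ver, rev, support = pn532.get_firmware_version()
print('Found PN532 with firmware version: {0}.{1}'.format(ver, rev))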
adafruit/Adafruit_Python_PN532
Adafruit_PN532/PN532.py
PN532.read_passive_target
def read_passive_target(self, card_baud=PN532_MIFARE_ISO14443A, timeout_sec=1): """Wait for a MiFare card to be available and return its UID when found. Will wait up to timeout_sec seconds and return None if no card is found, otherwise a bytearray with the UID of the found card is returned. """ # Send passive read command for 1 card. Expect at most a 7 byte UUID. response = self.call_function(PN532_COMMAND_INLISTPASSIVETARGET, params=[0x01, card_baud], response_length=17) # If no response is available return None to indicate no card is present. if response is None: return None # Check only 1 card with up to a 7 byte UID is present. if response[0] != 0x01: raise RuntimeError('More than one card detected!') if response[5] > 7: raise RuntimeError('Found card with unexpectedly long UID!') # Return UID of card. return response[6:6+response[5]]
python
def read_passive_target(self, card_baud=PN532_MIFARE_ISO14443A, timeout_sec=1): """Wait for a MiFare card to be available and return its UID when found. Will wait up to timeout_sec seconds and return None if no card is found, otherwise a bytearray with the UID of the found card is returned. """ # Send passive read command for 1 card. Expect at most a 7 byte UUID. response = self.call_function(PN532_COMMAND_INLISTPASSIVETARGET, params=[0x01, card_baud], response_length=17) # If no response is available return None to indicate no card is present. if response is None: return None # Check only 1 card with up to a 7 byte UID is present. if response[0] != 0x01: raise RuntimeError('More than one card detected!') if response[5] > 7: raise RuntimeError('Found card with unexpectedly long UID!') # Return UID of card. return response[6:6+response[5]]
[ "def", "read_passive_target", "(", "self", ",", "card_baud", "=", "PN532_MIFARE_ISO14443A", ",", "timeout_sec", "=", "1", ")", ":", "# Send passive read command for 1 card. Expect at most a 7 byte UUID.", "response", "=", "self", ".", "call_function", "(", "PN532_COMMAND_INLISTPASSIVETARGET", ",", "params", "=", "[", "0x01", ",", "card_baud", "]", ",", "response_length", "=", "17", ")", "# If no response is available return None to indicate no card is present.", "if", "response", "is", "None", ":", "return", "None", "# Check only 1 card with up to a 7 byte UID is present.", "if", "response", "[", "0", "]", "!=", "0x01", ":", "raise", "RuntimeError", "(", "'More than one card detected!'", ")", "if", "response", "[", "5", "]", ">", "7", ":", "raise", "RuntimeError", "(", "'Found card with unexpectedly long UID!'", ")", "# Return UID of card.", "return", "response", "[", "6", ":", "6", "+", "response", "[", "5", "]", "]" ]
Wait for a MiFare card to be available and return its UID when found. Will wait up to timeout_sec seconds and return None if no card is found, otherwise a bytearray with the UID of the found card is returned.
[ "Wait", "for", "a", "MiFare", "card", "to", "be", "available", "and", "return", "its", "UID", "when", "found", ".", "Will", "wait", "up", "to", "timeout_sec", "seconds", "and", "return", "None", "if", "no", "card", "is", "found", "otherwise", "a", "bytearray", "with", "the", "UID", "of", "the", "found", "card", "is", "returned", "." ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/Adafruit_PN532/PN532.py#L363-L381
train
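A polling sketch around read_passive_target; SAM_configuration() is the setup call the library's examples perform before reading, and the pn532 object is assumed to be constructed as in the sketch above:

import binascii

pn532.SAM_configuration()              # configure the Secure Access Module for card reads
while True:
    uid = pn532.read_passive_target()  # returns None if no card answered
    if uid is None:
        continue
    print('Card UID: 0x{0}'.format(binascii.hexlify(uid)))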
adafruit/Adafruit_Python_PN532
Adafruit_PN532/PN532.py
PN532.mifare_classic_read_block
def mifare_classic_read_block(self, block_number): """Read a block of data from the card. Block number should be the block to read. If the block is successfully read a bytearray of length 16 with data starting at the specified block will be returned. If the block is not read then None will be returned. """ # Send InDataExchange request to read block of MiFare data. response = self.call_function(PN532_COMMAND_INDATAEXCHANGE, params=[0x01, MIFARE_CMD_READ, block_number & 0xFF], response_length=17) # Check first response is 0x00 to show success. if response[0] != 0x00: return None # Return first 4 bytes since 16 bytes are always returned. return response[1:]
python
def mifare_classic_read_block(self, block_number): """Read a block of data from the card. Block number should be the block to read. If the block is successfully read a bytearray of length 16 with data starting at the specified block will be returned. If the block is not read then None will be returned. """ # Send InDataExchange request to read block of MiFare data. response = self.call_function(PN532_COMMAND_INDATAEXCHANGE, params=[0x01, MIFARE_CMD_READ, block_number & 0xFF], response_length=17) # Check first response is 0x00 to show success. if response[0] != 0x00: return None # Return first 4 bytes since 16 bytes are always returned. return response[1:]
[ "def", "mifare_classic_read_block", "(", "self", ",", "block_number", ")", ":", "# Send InDataExchange request to read block of MiFare data.", "response", "=", "self", ".", "call_function", "(", "PN532_COMMAND_INDATAEXCHANGE", ",", "params", "=", "[", "0x01", ",", "MIFARE_CMD_READ", ",", "block_number", "&", "0xFF", "]", ",", "response_length", "=", "17", ")", "# Check first response is 0x00 to show success.", "if", "response", "[", "0", "]", "!=", "0x00", ":", "return", "None", "# Return first 4 bytes since 16 bytes are always returned.", "return", "response", "[", "1", ":", "]" ]
Read a block of data from the card. Block number should be the block to read. If the block is successfully read a bytearray of length 16 with data starting at the specified block will be returned. If the block is not read then None will be returned.
[ "Read", "a", "block", "of", "data", "from", "the", "card", ".", "Block", "number", "should", "be", "the", "block", "to", "read", ".", "If", "the", "block", "is", "successfully", "read", "a", "bytearray", "of", "length", "16", "with", "data", "starting", "at", "the", "specified", "block", "will", "be", "returned", ".", "If", "the", "block", "is", "not", "read", "then", "None", "will", "be", "returned", "." ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/Adafruit_PN532/PN532.py#L406-L420
train
adafruit/Adafruit_Python_PN532
Adafruit_PN532/PN532.py
PN532.mifare_classic_write_block
def mifare_classic_write_block(self, block_number, data): """Write a block of data to the card. Block number should be the block to write and data should be a byte array of length 16 with the data to write. If the data is successfully written then True is returned, otherwise False is returned. """ assert data is not None and len(data) == 16, 'Data must be an array of 16 bytes!' # Build parameters for InDataExchange command to do MiFare classic write. params = bytearray(19) params[0] = 0x01 # Max card numbers params[1] = MIFARE_CMD_WRITE params[2] = block_number & 0xFF params[3:] = data # Send InDataExchange request. response = self.call_function(PN532_COMMAND_INDATAEXCHANGE, params=params, response_length=1) return response[0] == 0x00
python
def mifare_classic_write_block(self, block_number, data): """Write a block of data to the card. Block number should be the block to write and data should be a byte array of length 16 with the data to write. If the data is successfully written then True is returned, otherwise False is returned. """ assert data is not None and len(data) == 16, 'Data must be an array of 16 bytes!' # Build parameters for InDataExchange command to do MiFare classic write. params = bytearray(19) params[0] = 0x01 # Max card numbers params[1] = MIFARE_CMD_WRITE params[2] = block_number & 0xFF params[3:] = data # Send InDataExchange request. response = self.call_function(PN532_COMMAND_INDATAEXCHANGE, params=params, response_length=1) return response[0] == 0x00
[ "def", "mifare_classic_write_block", "(", "self", ",", "block_number", ",", "data", ")", ":", "assert", "data", "is", "not", "None", "and", "len", "(", "data", ")", "==", "16", ",", "'Data must be an array of 16 bytes!'", "# Build parameters for InDataExchange command to do MiFare classic write.", "params", "=", "bytearray", "(", "19", ")", "params", "[", "0", "]", "=", "0x01", "# Max card numbers", "params", "[", "1", "]", "=", "MIFARE_CMD_WRITE", "params", "[", "2", "]", "=", "block_number", "&", "0xFF", "params", "[", "3", ":", "]", "=", "data", "# Send InDataExchange request.", "response", "=", "self", ".", "call_function", "(", "PN532_COMMAND_INDATAEXCHANGE", ",", "params", "=", "params", ",", "response_length", "=", "1", ")", "return", "response", "[", "0", "]", "==", "0x00" ]
Write a block of data to the card. Block number should be the block to write and data should be a byte array of length 16 with the data to write. If the data is successfully written then True is returned, otherwise False is returned.
[ "Write", "a", "block", "of", "data", "to", "the", "card", ".", "Block", "number", "should", "be", "the", "block", "to", "write", "and", "data", "should", "be", "a", "byte", "array", "of", "length", "16", "with", "the", "data", "to", "write", ".", "If", "the", "data", "is", "successfully", "written", "then", "True", "is", "returned", "otherwise", "False", "is", "returned", "." ]
343521a8ec842ea82f680a5ed868fee16e9609bd
https://github.com/adafruit/Adafruit_Python_PN532/blob/343521a8ec842ea82f680a5ed868fee16e9609bd/Adafruit_PN532/PN532.py#L422-L439
train
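A read/write sketch for the two MiFare Classic helpers above; the authenticate call, the MIFARE_CMD_AUTH_B constant and the factory-default key follow the library's examples and are assumptions here, and block 4 is just an illustrative data block:

CARD_KEY = [0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]   # assumed factory-default Classic key
BLOCK = 4

# uid comes from read_passive_target(); Classic cards need a sector authentication first.
if pn532.mifare_classic_authenticate_block(uid, BLOCK, PN532.MIFARE_CMD_AUTH_B, CARD_KEY):
    data = bytearray(16)
    data[0:5] = b'hello'
    if pn532.mifare_classic_write_block(BLOCK, data):
        print(pn532.mifare_classic_read_block(BLOCK))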
edwardgeorge/virtualenv-clone
clonevirtualenv.py
_dirmatch
def _dirmatch(path, matchwith): """Check if path is within matchwith's tree. >>> _dirmatch('/home/foo/bar', '/home/foo/bar') True >>> _dirmatch('/home/foo/bar/', '/home/foo/bar') True >>> _dirmatch('/home/foo/bar/etc', '/home/foo/bar') True >>> _dirmatch('/home/foo/bar2', '/home/foo/bar') False >>> _dirmatch('/home/foo/bar2/etc', '/home/foo/bar') False """ matchlen = len(matchwith) if (path.startswith(matchwith) and path[matchlen:matchlen + 1] in [os.sep, '']): return True return False
python
def _dirmatch(path, matchwith): """Check if path is within matchwith's tree. >>> _dirmatch('/home/foo/bar', '/home/foo/bar') True >>> _dirmatch('/home/foo/bar/', '/home/foo/bar') True >>> _dirmatch('/home/foo/bar/etc', '/home/foo/bar') True >>> _dirmatch('/home/foo/bar2', '/home/foo/bar') False >>> _dirmatch('/home/foo/bar2/etc', '/home/foo/bar') False """ matchlen = len(matchwith) if (path.startswith(matchwith) and path[matchlen:matchlen + 1] in [os.sep, '']): return True return False
[ "def", "_dirmatch", "(", "path", ",", "matchwith", ")", ":", "matchlen", "=", "len", "(", "matchwith", ")", "if", "(", "path", ".", "startswith", "(", "matchwith", ")", "and", "path", "[", "matchlen", ":", "matchlen", "+", "1", "]", "in", "[", "os", ".", "sep", ",", "''", "]", ")", ":", "return", "True", "return", "False" ]
Check if path is within matchwith's tree. >>> _dirmatch('/home/foo/bar', '/home/foo/bar') True >>> _dirmatch('/home/foo/bar/', '/home/foo/bar') True >>> _dirmatch('/home/foo/bar/etc', '/home/foo/bar') True >>> _dirmatch('/home/foo/bar2', '/home/foo/bar') False >>> _dirmatch('/home/foo/bar2/etc', '/home/foo/bar') False
[ "Check", "if", "path", "is", "within", "matchwith", "s", "tree", "." ]
434b12eb725ac1850b60f2bad8e848540e5596de
https://github.com/edwardgeorge/virtualenv-clone/blob/434b12eb725ac1850b60f2bad8e848540e5596de/clonevirtualenv.py#L29-L47
train
edwardgeorge/virtualenv-clone
clonevirtualenv.py
_virtualenv_sys
def _virtualenv_sys(venv_path): "obtain version and path info from a virtualenv." executable = os.path.join(venv_path, env_bin_dir, 'python') # Must use "executable" as the first argument rather than as the # keyword argument "executable" to get correct value from sys.path p = subprocess.Popen([executable, '-c', 'import sys;' 'print (sys.version[:3]);' 'print ("\\n".join(sys.path));'], env={}, stdout=subprocess.PIPE) stdout, err = p.communicate() assert not p.returncode and stdout lines = stdout.decode('utf-8').splitlines() return lines[0], list(filter(bool, lines[1:]))
python
def _virtualenv_sys(venv_path): "obtain version and path info from a virtualenv." executable = os.path.join(venv_path, env_bin_dir, 'python') # Must use "executable" as the first argument rather than as the # keyword argument "executable" to get correct value from sys.path p = subprocess.Popen([executable, '-c', 'import sys;' 'print (sys.version[:3]);' 'print ("\\n".join(sys.path));'], env={}, stdout=subprocess.PIPE) stdout, err = p.communicate() assert not p.returncode and stdout lines = stdout.decode('utf-8').splitlines() return lines[0], list(filter(bool, lines[1:]))
[ "def", "_virtualenv_sys", "(", "venv_path", ")", ":", "executable", "=", "os", ".", "path", ".", "join", "(", "venv_path", ",", "env_bin_dir", ",", "'python'", ")", "# Must use \"executable\" as the first argument rather than as the", "# keyword argument \"executable\" to get correct value from sys.path", "p", "=", "subprocess", ".", "Popen", "(", "[", "executable", ",", "'-c'", ",", "'import sys;'", "'print (sys.version[:3]);'", "'print (\"\\\\n\".join(sys.path));'", "]", ",", "env", "=", "{", "}", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "stdout", ",", "err", "=", "p", ".", "communicate", "(", ")", "assert", "not", "p", ".", "returncode", "and", "stdout", "lines", "=", "stdout", ".", "decode", "(", "'utf-8'", ")", ".", "splitlines", "(", ")", "return", "lines", "[", "0", "]", ",", "list", "(", "filter", "(", "bool", ",", "lines", "[", "1", ":", "]", ")", ")" ]
obtain version and path info from a virtualenv.
[ "obtain", "version", "and", "path", "info", "from", "a", "virtualenv", "." ]
434b12eb725ac1850b60f2bad8e848540e5596de
https://github.com/edwardgeorge/virtualenv-clone/blob/434b12eb725ac1850b60f2bad8e848540e5596de/clonevirtualenv.py#L50-L64
train
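A small sketch of what the helper returns when pointed at an existing environment; the path is hypothetical:

version, sys_path = _virtualenv_sys('/home/user/.virtualenvs/demo')
print(version)    # e.g. '3.6' -- the first three characters of sys.version
print(sys_path)   # the interpreter's sys.path entries, with empty strings filtered out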
dsoprea/PyEasyArchive
libarchive/types/archive_entry.py
int_to_ef
def int_to_ef(n): """This is here for testing support but, in practice, this isn't very useful as many of the flags are just combinations of other flags. The relationships are defined by the OS in ways that aren't semantically intuitive to this project. """ flags = {} for name, value in libarchive.constants.archive_entry.FILETYPES.items(): flags[name] = (n & value) > 0 return ENTRY_FILETYPE(**flags)
python
def int_to_ef(n): """This is here for testing support but, in practice, this isn't very useful as many of the flags are just combinations of other flags. The relationships are defined by the OS in ways that aren't semantically intuitive to this project. """ flags = {} for name, value in libarchive.constants.archive_entry.FILETYPES.items(): flags[name] = (n & value) > 0 return ENTRY_FILETYPE(**flags)
[ "def", "int_to_ef", "(", "n", ")", ":", "flags", "=", "{", "}", "for", "name", ",", "value", "in", "libarchive", ".", "constants", ".", "archive_entry", ".", "FILETYPES", ".", "items", "(", ")", ":", "flags", "[", "name", "]", "=", "(", "n", "&", "value", ")", ">", "0", "return", "ENTRY_FILETYPE", "(", "*", "*", "flags", ")" ]
This is here for testing support but, in practice, this isn't very useful as many of the flags are just combinations of other flags. The relationships are defined by the OS in ways that aren't semantically intuitive to this project.
[ "This", "is", "here", "for", "testing", "support", "but", "in", "practice", "this", "isn", "t", "very", "useful", "as", "many", "of", "the", "flags", "are", "just", "combinations", "of", "other", "flags", ".", "The", "relationships", "are", "defined", "by", "the", "OS", "in", "ways", "that", "aren", "t", "semantically", "intuitive", "to", "this", "project", "." ]
50414b9fa9a1055435499b5b2e4b2a336a40dff6
https://github.com/dsoprea/PyEasyArchive/blob/50414b9fa9a1055435499b5b2e4b2a336a40dff6/libarchive/types/archive_entry.py#L24-L35
train
dsoprea/PyEasyArchive
libarchive/adapters/archive_read.py
_enumerator
def _enumerator(opener, entry_cls, format_code=None, filter_code=None): """Return an archive enumerator from a user-defined source, using a user- defined entry type. """ archive_res = _archive_read_new() try: r = _set_read_context(archive_res, format_code, filter_code) opener(archive_res) def it(): while 1: with _archive_read_next_header(archive_res) as entry_res: if entry_res is None: break e = entry_cls(archive_res, entry_res) yield e if e.is_consumed is False: _archive_read_data_skip(archive_res) yield it() finally: _archive_read_free(archive_res)
python
def _enumerator(opener, entry_cls, format_code=None, filter_code=None): """Return an archive enumerator from a user-defined source, using a user- defined entry type. """ archive_res = _archive_read_new() try: r = _set_read_context(archive_res, format_code, filter_code) opener(archive_res) def it(): while 1: with _archive_read_next_header(archive_res) as entry_res: if entry_res is None: break e = entry_cls(archive_res, entry_res) yield e if e.is_consumed is False: _archive_read_data_skip(archive_res) yield it() finally: _archive_read_free(archive_res)
[ "def", "_enumerator", "(", "opener", ",", "entry_cls", ",", "format_code", "=", "None", ",", "filter_code", "=", "None", ")", ":", "archive_res", "=", "_archive_read_new", "(", ")", "try", ":", "r", "=", "_set_read_context", "(", "archive_res", ",", "format_code", ",", "filter_code", ")", "opener", "(", "archive_res", ")", "def", "it", "(", ")", ":", "while", "1", ":", "with", "_archive_read_next_header", "(", "archive_res", ")", "as", "entry_res", ":", "if", "entry_res", "is", "None", ":", "break", "e", "=", "entry_cls", "(", "archive_res", ",", "entry_res", ")", "yield", "e", "if", "e", ".", "is_consumed", "is", "False", ":", "_archive_read_data_skip", "(", "archive_res", ")", "yield", "it", "(", ")", "finally", ":", "_archive_read_free", "(", "archive_res", ")" ]
Return an archive enumerator from a user-defined source, using a user- defined entry type.
[ "Return", "an", "archive", "enumerator", "from", "a", "user", "-", "defined", "source", "using", "a", "user", "-", "defined", "entry", "type", "." ]
50414b9fa9a1055435499b5b2e4b2a336a40dff6
https://github.com/dsoprea/PyEasyArchive/blob/50414b9fa9a1055435499b5b2e4b2a336a40dff6/libarchive/adapters/archive_read.py#L270-L293
train
dsoprea/PyEasyArchive
libarchive/adapters/archive_read.py
file_enumerator
def file_enumerator(filepath, block_size=10240, *args, **kwargs): """Return an enumerator that knows how to read a physical file.""" _LOGGER.debug("Enumerating through archive file: %s", filepath) def opener(archive_res): _LOGGER.debug("Opening from file (file_enumerator): %s", filepath) _archive_read_open_filename(archive_res, filepath, block_size) if 'entry_cls' not in kwargs: kwargs['entry_cls'] = _ArchiveEntryItReadable return _enumerator(opener, *args, **kwargs)
python
def file_enumerator(filepath, block_size=10240, *args, **kwargs): """Return an enumerator that knows how to read a physical file.""" _LOGGER.debug("Enumerating through archive file: %s", filepath) def opener(archive_res): _LOGGER.debug("Opening from file (file_enumerator): %s", filepath) _archive_read_open_filename(archive_res, filepath, block_size) if 'entry_cls' not in kwargs: kwargs['entry_cls'] = _ArchiveEntryItReadable return _enumerator(opener, *args, **kwargs)
[ "def", "file_enumerator", "(", "filepath", ",", "block_size", "=", "10240", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_LOGGER", ".", "debug", "(", "\"Enumerating through archive file: %s\"", ",", "filepath", ")", "def", "opener", "(", "archive_res", ")", ":", "_LOGGER", ".", "debug", "(", "\"Opening from file (file_enumerator): %s\"", ",", "filepath", ")", "_archive_read_open_filename", "(", "archive_res", ",", "filepath", ",", "block_size", ")", "if", "'entry_cls'", "not", "in", "kwargs", ":", "kwargs", "[", "'entry_cls'", "]", "=", "_ArchiveEntryItReadable", "return", "_enumerator", "(", "opener", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Return an enumerator that knows how to read a physical file.
[ "Return", "an", "enumerator", "that", "knows", "how", "to", "read", "a", "physical", "file", "." ]
50414b9fa9a1055435499b5b2e4b2a336a40dff6
https://github.com/dsoprea/PyEasyArchive/blob/50414b9fa9a1055435499b5b2e4b2a336a40dff6/libarchive/adapters/archive_read.py#L295-L309
train
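An enumeration sketch; the libarchive.public wrapper module, the archive name and the pathname attribute follow the project's README examples and are assumptions here:

import libarchive.public

with libarchive.public.file_enumerator('test.tar') as entries:
    for entry in entries:
        print(entry.pathname)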
dsoprea/PyEasyArchive
libarchive/adapters/archive_read.py
memory_enumerator
def memory_enumerator(buffer_, *args, **kwargs): """Return an enumerator that knows how to read raw memory.""" _LOGGER.debug("Enumerating through (%d) bytes of archive data.", len(buffer_)) def opener(archive_res): _LOGGER.debug("Opening from (%d) bytes (memory_enumerator).", len(buffer_)) _archive_read_open_memory(archive_res, buffer_) if 'entry_cls' not in kwargs: kwargs['entry_cls'] = _ArchiveEntryItReadable return _enumerator(opener, *args, **kwargs)
python
def memory_enumerator(buffer_, *args, **kwargs): """Return an enumerator that knows how to read raw memory.""" _LOGGER.debug("Enumerating through (%d) bytes of archive data.", len(buffer_)) def opener(archive_res): _LOGGER.debug("Opening from (%d) bytes (memory_enumerator).", len(buffer_)) _archive_read_open_memory(archive_res, buffer_) if 'entry_cls' not in kwargs: kwargs['entry_cls'] = _ArchiveEntryItReadable return _enumerator(opener, *args, **kwargs)
[ "def", "memory_enumerator", "(", "buffer_", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_LOGGER", ".", "debug", "(", "\"Enumerating through (%d) bytes of archive data.\"", ",", "len", "(", "buffer_", ")", ")", "def", "opener", "(", "archive_res", ")", ":", "_LOGGER", ".", "debug", "(", "\"Opening from (%d) bytes (memory_enumerator).\"", ",", "len", "(", "buffer_", ")", ")", "_archive_read_open_memory", "(", "archive_res", ",", "buffer_", ")", "if", "'entry_cls'", "not", "in", "kwargs", ":", "kwargs", "[", "'entry_cls'", "]", "=", "_ArchiveEntryItReadable", "return", "_enumerator", "(", "opener", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
Return an enumerator that knows how to read raw memory.
[ "Return", "an", "enumerator", "that", "knows", "how", "to", "read", "raw", "memory", "." ]
50414b9fa9a1055435499b5b2e4b2a336a40dff6
https://github.com/dsoprea/PyEasyArchive/blob/50414b9fa9a1055435499b5b2e4b2a336a40dff6/libarchive/adapters/archive_read.py#L311-L328
train
dsoprea/PyEasyArchive
libarchive/adapters/archive_read.py
_pour
def _pour(opener, flags=0, *args, **kwargs): """A flexible pouring facility that knows how to enumerate entry data.""" with _enumerator(opener, *args, entry_cls=_ArchiveEntryItState, **kwargs) as r: ext = libarchive.calls.archive_write.c_archive_write_disk_new() libarchive.calls.archive_write.c_archive_write_disk_set_options( ext, flags ) for state in r: yield state if state.selected is False: continue r = libarchive.calls.archive_write.c_archive_write_header( ext, state.entry_res) buff = ctypes.c_void_p() size = ctypes.c_size_t() offset = ctypes.c_longlong() while 1: r = libarchive.calls.archive_read.\ c_archive_read_data_block( state.reader_res, ctypes.byref(buff), ctypes.byref(size), ctypes.byref(offset)) if r == libarchive.constants.archive.ARCHIVE_EOF: break elif r != libarchive.constants.archive.ARCHIVE_OK: message = c_archive_error_string(state.reader_res) raise libarchive.exception.ArchiveError( "Pour failed: (%d) [%s]" % (r, message)) r = libarchive.calls.archive_write.c_archive_write_data_block( ext, buff, size, offset) r = libarchive.calls.archive_write.\ c_archive_write_finish_entry(ext)
python
def _pour(opener, flags=0, *args, **kwargs): """A flexible pouring facility that knows how to enumerate entry data.""" with _enumerator(opener, *args, entry_cls=_ArchiveEntryItState, **kwargs) as r: ext = libarchive.calls.archive_write.c_archive_write_disk_new() libarchive.calls.archive_write.c_archive_write_disk_set_options( ext, flags ) for state in r: yield state if state.selected is False: continue r = libarchive.calls.archive_write.c_archive_write_header( ext, state.entry_res) buff = ctypes.c_void_p() size = ctypes.c_size_t() offset = ctypes.c_longlong() while 1: r = libarchive.calls.archive_read.\ c_archive_read_data_block( state.reader_res, ctypes.byref(buff), ctypes.byref(size), ctypes.byref(offset)) if r == libarchive.constants.archive.ARCHIVE_EOF: break elif r != libarchive.constants.archive.ARCHIVE_OK: message = c_archive_error_string(state.reader_res) raise libarchive.exception.ArchiveError( "Pour failed: (%d) [%s]" % (r, message)) r = libarchive.calls.archive_write.c_archive_write_data_block( ext, buff, size, offset) r = libarchive.calls.archive_write.\ c_archive_write_finish_entry(ext)
[ "def", "_pour", "(", "opener", ",", "flags", "=", "0", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "with", "_enumerator", "(", "opener", ",", "*", "args", ",", "entry_cls", "=", "_ArchiveEntryItState", ",", "*", "*", "kwargs", ")", "as", "r", ":", "ext", "=", "libarchive", ".", "calls", ".", "archive_write", ".", "c_archive_write_disk_new", "(", ")", "libarchive", ".", "calls", ".", "archive_write", ".", "c_archive_write_disk_set_options", "(", "ext", ",", "flags", ")", "for", "state", "in", "r", ":", "yield", "state", "if", "state", ".", "selected", "is", "False", ":", "continue", "r", "=", "libarchive", ".", "calls", ".", "archive_write", ".", "c_archive_write_header", "(", "ext", ",", "state", ".", "entry_res", ")", "buff", "=", "ctypes", ".", "c_void_p", "(", ")", "size", "=", "ctypes", ".", "c_size_t", "(", ")", "offset", "=", "ctypes", ".", "c_longlong", "(", ")", "while", "1", ":", "r", "=", "libarchive", ".", "calls", ".", "archive_read", ".", "c_archive_read_data_block", "(", "state", ".", "reader_res", ",", "ctypes", ".", "byref", "(", "buff", ")", ",", "ctypes", ".", "byref", "(", "size", ")", ",", "ctypes", ".", "byref", "(", "offset", ")", ")", "if", "r", "==", "libarchive", ".", "constants", ".", "archive", ".", "ARCHIVE_EOF", ":", "break", "elif", "r", "!=", "libarchive", ".", "constants", ".", "archive", ".", "ARCHIVE_OK", ":", "message", "=", "c_archive_error_string", "(", "state", ".", "reader_res", ")", "raise", "libarchive", ".", "exception", ".", "ArchiveError", "(", "\"Pour failed: (%d) [%s]\"", "%", "(", "r", ",", "message", ")", ")", "r", "=", "libarchive", ".", "calls", ".", "archive_write", ".", "c_archive_write_data_block", "(", "ext", ",", "buff", ",", "size", ",", "offset", ")", "r", "=", "libarchive", ".", "calls", ".", "archive_write", ".", "c_archive_write_finish_entry", "(", "ext", ")" ]
A flexible pouring facility that knows how to enumerate entry data.
[ "A", "flexible", "pouring", "facility", "that", "knows", "how", "to", "enumerate", "entry", "data", "." ]
50414b9fa9a1055435499b5b2e4b2a336a40dff6
https://github.com/dsoprea/PyEasyArchive/blob/50414b9fa9a1055435499b5b2e4b2a336a40dff6/libarchive/adapters/archive_read.py#L348-L397
train
dsoprea/PyEasyArchive
libarchive/adapters/archive_read.py
file_pour
def file_pour(filepath, block_size=10240, *args, **kwargs): """Write physical files from entries.""" def opener(archive_res): _LOGGER.debug("Opening from file (file_pour): %s", filepath) _archive_read_open_filename(archive_res, filepath, block_size) return _pour(opener, *args, flags=0, **kwargs)
python
def file_pour(filepath, block_size=10240, *args, **kwargs): """Write physical files from entries.""" def opener(archive_res): _LOGGER.debug("Opening from file (file_pour): %s", filepath) _archive_read_open_filename(archive_res, filepath, block_size) return _pour(opener, *args, flags=0, **kwargs)
[ "def", "file_pour", "(", "filepath", ",", "block_size", "=", "10240", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "opener", "(", "archive_res", ")", ":", "_LOGGER", ".", "debug", "(", "\"Opening from file (file_pour): %s\"", ",", "filepath", ")", "_archive_read_open_filename", "(", "archive_res", ",", "filepath", ",", "block_size", ")", "return", "_pour", "(", "opener", ",", "*", "args", ",", "flags", "=", "0", ",", "*", "*", "kwargs", ")" ]
Write physical files from entries.
[ "Write", "physical", "files", "from", "entries", "." ]
50414b9fa9a1055435499b5b2e4b2a336a40dff6
https://github.com/dsoprea/PyEasyArchive/blob/50414b9fa9a1055435499b5b2e4b2a336a40dff6/libarchive/adapters/archive_read.py#L399-L406
train
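An extraction sketch built on file_pour; entries are written beneath the current working directory as they are yielded, and both the target directory and the archive name are placeholders:

import os
import libarchive.public

os.chdir('/tmp/unpack')   # assumption: the extraction target already exists
for entry in libarchive.public.file_pour('test.tar'):
    print('extracted:', entry.pathname)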
dsoprea/PyEasyArchive
libarchive/adapters/archive_read.py
memory_pour
def memory_pour(buffer_, *args, **kwargs): """Yield data from entries.""" def opener(archive_res): _LOGGER.debug("Opening from (%d) bytes (memory_pour).", len(buffer_)) _archive_read_open_memory(archive_res, buffer_) return _pour(opener, *args, flags=0, **kwargs)
python
def memory_pour(buffer_, *args, **kwargs): """Yield data from entries.""" def opener(archive_res): _LOGGER.debug("Opening from (%d) bytes (memory_pour).", len(buffer_)) _archive_read_open_memory(archive_res, buffer_) return _pour(opener, *args, flags=0, **kwargs)
[ "def", "memory_pour", "(", "buffer_", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "def", "opener", "(", "archive_res", ")", ":", "_LOGGER", ".", "debug", "(", "\"Opening from (%d) bytes (memory_pour).\"", ",", "len", "(", "buffer_", ")", ")", "_archive_read_open_memory", "(", "archive_res", ",", "buffer_", ")", "return", "_pour", "(", "opener", ",", "*", "args", ",", "flags", "=", "0", ",", "*", "*", "kwargs", ")" ]
Yield data from entries.
[ "Yield", "data", "from", "entries", "." ]
50414b9fa9a1055435499b5b2e4b2a336a40dff6
https://github.com/dsoprea/PyEasyArchive/blob/50414b9fa9a1055435499b5b2e4b2a336a40dff6/libarchive/adapters/archive_read.py#L408-L415
train
dsoprea/PyEasyArchive
libarchive/adapters/archive_write.py
_archive_write_data
def _archive_write_data(archive, data): """Write data to archive. This will only be called with a non-empty string. """ n = libarchive.calls.archive_write.c_archive_write_data( archive, ctypes.cast(ctypes.c_char_p(data), ctypes.c_void_p), len(data)) if n == 0: message = c_archive_error_string(archive) raise ValueError("No bytes were written. Error? [%s]" % (message))
python
def _archive_write_data(archive, data): """Write data to archive. This will only be called with a non-empty string. """ n = libarchive.calls.archive_write.c_archive_write_data( archive, ctypes.cast(ctypes.c_char_p(data), ctypes.c_void_p), len(data)) if n == 0: message = c_archive_error_string(archive) raise ValueError("No bytes were written. Error? [%s]" % (message))
[ "def", "_archive_write_data", "(", "archive", ",", "data", ")", ":", "n", "=", "libarchive", ".", "calls", ".", "archive_write", ".", "c_archive_write_data", "(", "archive", ",", "ctypes", ".", "cast", "(", "ctypes", ".", "c_char_p", "(", "data", ")", ",", "ctypes", ".", "c_void_p", ")", ",", "len", "(", "data", ")", ")", "if", "n", "==", "0", ":", "message", "=", "c_archive_error_string", "(", "archive", ")", "raise", "ValueError", "(", "\"No bytes were written. Error? [%s]\"", "%", "(", "message", ")", ")" ]
Write data to archive. This will only be called with a non-empty string.
[ "Write", "data", "to", "archive", ".", "This", "will", "only", "be", "called", "with", "a", "non", "-", "empty", "string", "." ]
50414b9fa9a1055435499b5b2e4b2a336a40dff6
https://github.com/dsoprea/PyEasyArchive/blob/50414b9fa9a1055435499b5b2e4b2a336a40dff6/libarchive/adapters/archive_write.py#L71-L82
train
adafruit/Adafruit_CircuitPython_BME280
adafruit_bme280.py
Adafruit_BME280._write_ctrl_meas
def _write_ctrl_meas(self): """ Write the values to the ctrl_meas and ctrl_hum registers in the device ctrl_meas sets the pressure and temperature data acquisition options ctrl_hum sets the humidity oversampling and must be written to first """ self._write_register_byte(_BME280_REGISTER_CTRL_HUM, self.overscan_humidity) self._write_register_byte(_BME280_REGISTER_CTRL_MEAS, self._ctrl_meas)
python
def _write_ctrl_meas(self): """ Write the values to the ctrl_meas and ctrl_hum registers in the device ctrl_meas sets the pressure and temperature data acquisition options ctrl_hum sets the humidity oversampling and must be written to first """ self._write_register_byte(_BME280_REGISTER_CTRL_HUM, self.overscan_humidity) self._write_register_byte(_BME280_REGISTER_CTRL_MEAS, self._ctrl_meas)
[ "def", "_write_ctrl_meas", "(", "self", ")", ":", "self", ".", "_write_register_byte", "(", "_BME280_REGISTER_CTRL_HUM", ",", "self", ".", "overscan_humidity", ")", "self", ".", "_write_register_byte", "(", "_BME280_REGISTER_CTRL_MEAS", ",", "self", ".", "_ctrl_meas", ")" ]
Write the values to the ctrl_meas and ctrl_hum registers in the device ctrl_meas sets the pressure and temperature data acquisition options ctrl_hum sets the humidity oversampling and must be written to first
[ "Write", "the", "values", "to", "the", "ctrl_meas", "and", "ctrl_hum", "registers", "in", "the", "device", "ctrl_meas", "sets", "the", "pressure", "and", "temperature", "data", "acquistion", "options", "ctrl_hum", "sets", "the", "humidty", "oversampling", "and", "must", "be", "written", "to", "first" ]
febcd51983dc2bc3cd006bacaada505251c39af1
https://github.com/adafruit/Adafruit_CircuitPython_BME280/blob/febcd51983dc2bc3cd006bacaada505251c39af1/adafruit_bme280.py#L161-L168
train
adafruit/Adafruit_CircuitPython_BME280
adafruit_bme280.py
Adafruit_BME280._write_config
def _write_config(self): """Write the value to the config register in the device """ normal_flag = False if self._mode == MODE_NORMAL: #Writes to the config register may be ignored while in Normal mode normal_flag = True self.mode = MODE_SLEEP #So we switch to Sleep mode first self._write_register_byte(_BME280_REGISTER_CONFIG, self._config) if normal_flag: self.mode = MODE_NORMAL
python
def _write_config(self): """Write the value to the config register in the device """ normal_flag = False if self._mode == MODE_NORMAL: #Writes to the config register may be ignored while in Normal mode normal_flag = True self.mode = MODE_SLEEP #So we switch to Sleep mode first self._write_register_byte(_BME280_REGISTER_CONFIG, self._config) if normal_flag: self.mode = MODE_NORMAL
[ "def", "_write_config", "(", "self", ")", ":", "normal_flag", "=", "False", "if", "self", ".", "_mode", "==", "MODE_NORMAL", ":", "#Writes to the config register may be ignored while in Normal mode", "normal_flag", "=", "True", "self", ".", "mode", "=", "MODE_SLEEP", "#So we switch to Sleep mode first", "self", ".", "_write_register_byte", "(", "_BME280_REGISTER_CONFIG", ",", "self", ".", "_config", ")", "if", "normal_flag", ":", "self", ".", "mode", "=", "MODE_NORMAL" ]
Write the value to the config register in the device
[ "Write", "the", "value", "to", "the", "config", "register", "in", "the", "device" ]
febcd51983dc2bc3cd006bacaada505251c39af1
https://github.com/adafruit/Adafruit_CircuitPython_BME280/blob/febcd51983dc2bc3cd006bacaada505251c39af1/adafruit_bme280.py#L178-L187
train
adafruit/Adafruit_CircuitPython_BME280
adafruit_bme280.py
Adafruit_BME280._config
def _config(self): """Value to be written to the device's config register """ config = 0 if self.mode == MODE_NORMAL: config += (self._t_standby << 5) if self._iir_filter: config += (self._iir_filter << 2) return config
python
def _config(self): """Value to be written to the device's config register """ config = 0 if self.mode == MODE_NORMAL: config += (self._t_standby << 5) if self._iir_filter: config += (self._iir_filter << 2) return config
[ "def", "_config", "(", "self", ")", ":", "config", "=", "0", "if", "self", ".", "mode", "==", "MODE_NORMAL", ":", "config", "+=", "(", "self", ".", "_t_standby", "<<", "5", ")", "if", "self", ".", "_iir_filter", ":", "config", "+=", "(", "self", ".", "_iir_filter", "<<", "2", ")", "return", "config" ]
Value to be written to the device's config register
[ "Value", "to", "be", "written", "to", "the", "device", "s", "config", "register" ]
febcd51983dc2bc3cd006bacaada505251c39af1
https://github.com/adafruit/Adafruit_CircuitPython_BME280/blob/febcd51983dc2bc3cd006bacaada505251c39af1/adafruit_bme280.py#L282-L289
train
adafruit/Adafruit_CircuitPython_BME280
adafruit_bme280.py
Adafruit_BME280._ctrl_meas
def _ctrl_meas(self): """Value to be written to the device's ctrl_meas register """ ctrl_meas = (self.overscan_temperature << 5) ctrl_meas += (self.overscan_pressure << 2) ctrl_meas += self.mode return ctrl_meas
python
def _ctrl_meas(self): """Value to be written to the device's ctrl_meas register """ ctrl_meas = (self.overscan_temperature << 5) ctrl_meas += (self.overscan_pressure << 2) ctrl_meas += self.mode return ctrl_meas
[ "def", "_ctrl_meas", "(", "self", ")", ":", "ctrl_meas", "=", "(", "self", ".", "overscan_temperature", "<<", "5", ")", "ctrl_meas", "+=", "(", "self", ".", "overscan_pressure", "<<", "2", ")", "ctrl_meas", "+=", "self", ".", "mode", "return", "ctrl_meas" ]
Value to be written to the device's ctrl_meas register
[ "Value", "to", "be", "written", "to", "the", "device", "s", "ctrl_meas", "register" ]
febcd51983dc2bc3cd006bacaada505251c39af1
https://github.com/adafruit/Adafruit_CircuitPython_BME280/blob/febcd51983dc2bc3cd006bacaada505251c39af1/adafruit_bme280.py#L292-L297
train
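A worked example of the register packing performed by _ctrl_meas, using the oversampling and mode codes from the BME280 datasheet (16x oversampling = 0b101, normal mode = 0b11):

OVERSCAN_X16 = 0x05   # datasheet osrs code for 16x oversampling
MODE_NORMAL = 0x03    # datasheet mode bits for normal (free-running) mode

ctrl_meas = (OVERSCAN_X16 << 5) | (OVERSCAN_X16 << 2) | MODE_NORMAL
print(hex(ctrl_meas))  # 0xb7 -> osrs_t=101, osrs_p=101, mode=11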
adafruit/Adafruit_CircuitPython_BME280
adafruit_bme280.py
Adafruit_BME280.measurement_time_typical
def measurement_time_typical(self): """Typical time in milliseconds required to complete a measurement in normal mode""" meas_time_ms = 1.0 if self.overscan_temperature != OVERSCAN_DISABLE: meas_time_ms += (2 * _BME280_OVERSCANS.get(self.overscan_temperature)) if self.overscan_pressure != OVERSCAN_DISABLE: meas_time_ms += (2 * _BME280_OVERSCANS.get(self.overscan_pressure) + 0.5) if self.overscan_humidity != OVERSCAN_DISABLE: meas_time_ms += (2 * _BME280_OVERSCANS.get(self.overscan_humidity) + 0.5) return meas_time_ms
python
def measurement_time_typical(self): """Typical time in milliseconds required to complete a measurement in normal mode""" meas_time_ms = 1.0 if self.overscan_temperature != OVERSCAN_DISABLE: meas_time_ms += (2 * _BME280_OVERSCANS.get(self.overscan_temperature)) if self.overscan_pressure != OVERSCAN_DISABLE: meas_time_ms += (2 * _BME280_OVERSCANS.get(self.overscan_pressure) + 0.5) if self.overscan_humidity != OVERSCAN_DISABLE: meas_time_ms += (2 * _BME280_OVERSCANS.get(self.overscan_humidity) + 0.5) return meas_time_ms
[ "def", "measurement_time_typical", "(", "self", ")", ":", "meas_time_ms", "=", "1.0", "if", "self", ".", "overscan_temperature", "!=", "OVERSCAN_DISABLE", ":", "meas_time_ms", "+=", "(", "2", "*", "_BME280_OVERSCANS", ".", "get", "(", "self", ".", "overscan_temperature", ")", ")", "if", "self", ".", "overscan_pressure", "!=", "OVERSCAN_DISABLE", ":", "meas_time_ms", "+=", "(", "2", "*", "_BME280_OVERSCANS", ".", "get", "(", "self", ".", "overscan_pressure", ")", "+", "0.5", ")", "if", "self", ".", "overscan_humidity", "!=", "OVERSCAN_DISABLE", ":", "meas_time_ms", "+=", "(", "2", "*", "_BME280_OVERSCANS", ".", "get", "(", "self", ".", "overscan_humidity", ")", "+", "0.5", ")", "return", "meas_time_ms" ]
Typical time in milliseconds required to complete a measurement in normal mode
[ "Typical", "time", "in", "milliseconds", "required", "to", "complete", "a", "measurement", "in", "normal", "mode" ]
febcd51983dc2bc3cd006bacaada505251c39af1
https://github.com/adafruit/Adafruit_CircuitPython_BME280/blob/febcd51983dc2bc3cd006bacaada505251c39af1/adafruit_bme280.py#L300-L309
train
adafruit/Adafruit_CircuitPython_BME280
adafruit_bme280.py
Adafruit_BME280.pressure
def pressure(self): """ The compensated pressure in hectoPascals. returns None if pressure measurement is disabled """ self._read_temperature() # Algorithm from the BME280 driver # https://github.com/BoschSensortec/BME280_driver/blob/master/bme280.c adc = self._read24(_BME280_REGISTER_PRESSUREDATA) / 16 # lowest 4 bits get dropped var1 = float(self._t_fine) / 2.0 - 64000.0 var2 = var1 * var1 * self._pressure_calib[5] / 32768.0 var2 = var2 + var1 * self._pressure_calib[4] * 2.0 var2 = var2 / 4.0 + self._pressure_calib[3] * 65536.0 var3 = self._pressure_calib[2] * var1 * var1 / 524288.0 var1 = (var3 + self._pressure_calib[1] * var1) / 524288.0 var1 = (1.0 + var1 / 32768.0) * self._pressure_calib[0] if var1 == 0: return 0 if var1: pressure = 1048576.0 - adc pressure = ((pressure - var2 / 4096.0) * 6250.0) / var1 var1 = self._pressure_calib[8] * pressure * pressure / 2147483648.0 var2 = pressure * self._pressure_calib[7] / 32768.0 pressure = pressure + (var1 + var2 + self._pressure_calib[6]) / 16.0 pressure /= 100 if pressure < _BME280_PRESSURE_MIN_HPA: return _BME280_PRESSURE_MIN_HPA if pressure > _BME280_PRESSURE_MAX_HPA: return _BME280_PRESSURE_MAX_HPA return pressure else: return _BME280_PRESSURE_MIN_HPA
python
def pressure(self): """ The compensated pressure in hectoPascals. returns None if pressure measurement is disabled """ self._read_temperature() # Algorithm from the BME280 driver # https://github.com/BoschSensortec/BME280_driver/blob/master/bme280.c adc = self._read24(_BME280_REGISTER_PRESSUREDATA) / 16 # lowest 4 bits get dropped var1 = float(self._t_fine) / 2.0 - 64000.0 var2 = var1 * var1 * self._pressure_calib[5] / 32768.0 var2 = var2 + var1 * self._pressure_calib[4] * 2.0 var2 = var2 / 4.0 + self._pressure_calib[3] * 65536.0 var3 = self._pressure_calib[2] * var1 * var1 / 524288.0 var1 = (var3 + self._pressure_calib[1] * var1) / 524288.0 var1 = (1.0 + var1 / 32768.0) * self._pressure_calib[0] if var1 == 0: return 0 if var1: pressure = 1048576.0 - adc pressure = ((pressure - var2 / 4096.0) * 6250.0) / var1 var1 = self._pressure_calib[8] * pressure * pressure / 2147483648.0 var2 = pressure * self._pressure_calib[7] / 32768.0 pressure = pressure + (var1 + var2 + self._pressure_calib[6]) / 16.0 pressure /= 100 if pressure < _BME280_PRESSURE_MIN_HPA: return _BME280_PRESSURE_MIN_HPA if pressure > _BME280_PRESSURE_MAX_HPA: return _BME280_PRESSURE_MAX_HPA return pressure else: return _BME280_PRESSURE_MIN_HPA
[ "def", "pressure", "(", "self", ")", ":", "self", ".", "_read_temperature", "(", ")", "# Algorithm from the BME280 driver", "# https://github.com/BoschSensortec/BME280_driver/blob/master/bme280.c", "adc", "=", "self", ".", "_read24", "(", "_BME280_REGISTER_PRESSUREDATA", ")", "/", "16", "# lowest 4 bits get dropped", "var1", "=", "float", "(", "self", ".", "_t_fine", ")", "/", "2.0", "-", "64000.0", "var2", "=", "var1", "*", "var1", "*", "self", ".", "_pressure_calib", "[", "5", "]", "/", "32768.0", "var2", "=", "var2", "+", "var1", "*", "self", ".", "_pressure_calib", "[", "4", "]", "*", "2.0", "var2", "=", "var2", "/", "4.0", "+", "self", ".", "_pressure_calib", "[", "3", "]", "*", "65536.0", "var3", "=", "self", ".", "_pressure_calib", "[", "2", "]", "*", "var1", "*", "var1", "/", "524288.0", "var1", "=", "(", "var3", "+", "self", ".", "_pressure_calib", "[", "1", "]", "*", "var1", ")", "/", "524288.0", "var1", "=", "(", "1.0", "+", "var1", "/", "32768.0", ")", "*", "self", ".", "_pressure_calib", "[", "0", "]", "if", "var1", "==", "0", ":", "return", "0", "if", "var1", ":", "pressure", "=", "1048576.0", "-", "adc", "pressure", "=", "(", "(", "pressure", "-", "var2", "/", "4096.0", ")", "*", "6250.0", ")", "/", "var1", "var1", "=", "self", ".", "_pressure_calib", "[", "8", "]", "*", "pressure", "*", "pressure", "/", "2147483648.0", "var2", "=", "pressure", "*", "self", ".", "_pressure_calib", "[", "7", "]", "/", "32768.0", "pressure", "=", "pressure", "+", "(", "var1", "+", "var2", "+", "self", ".", "_pressure_calib", "[", "6", "]", ")", "/", "16.0", "pressure", "/=", "100", "if", "pressure", "<", "_BME280_PRESSURE_MIN_HPA", ":", "return", "_BME280_PRESSURE_MIN_HPA", "if", "pressure", ">", "_BME280_PRESSURE_MAX_HPA", ":", "return", "_BME280_PRESSURE_MAX_HPA", "return", "pressure", "else", ":", "return", "_BME280_PRESSURE_MIN_HPA" ]
The compensated pressure in hectoPascals. returns None if pressure measurement is disabled
[ "The", "compensated", "pressure", "in", "hectoPascals", ".", "returns", "None", "if", "pressure", "measurement", "is", "disabled" ]
febcd51983dc2bc3cd006bacaada505251c39af1
https://github.com/adafruit/Adafruit_CircuitPython_BME280/blob/febcd51983dc2bc3cd006bacaada505251c39af1/adafruit_bme280.py#L330-L363
train
adafruit/Adafruit_CircuitPython_BME280
adafruit_bme280.py
Adafruit_BME280.humidity
def humidity(self): """ The relative humidity in RH % returns None if humidity measurement is disabled """ self._read_temperature() hum = self._read_register(_BME280_REGISTER_HUMIDDATA, 2) #print("Humidity data: ", hum) adc = float(hum[0] << 8 | hum[1]) #print("adc:", adc) # Algorithm from the BME280 driver # https://github.com/BoschSensortec/BME280_driver/blob/master/bme280.c var1 = float(self._t_fine) - 76800.0 #print("var1 ", var1) var2 = (self._humidity_calib[3] * 64.0 + (self._humidity_calib[4] / 16384.0) * var1) #print("var2 ",var2) var3 = adc - var2 #print("var3 ",var3) var4 = self._humidity_calib[1] / 65536.0 #print("var4 ",var4) var5 = (1.0 + (self._humidity_calib[2] / 67108864.0) * var1) #print("var5 ",var5) var6 = 1.0 + (self._humidity_calib[5] / 67108864.0) * var1 * var5 #print("var6 ",var6) var6 = var3 * var4 * (var5 * var6) humidity = var6 * (1.0 - self._humidity_calib[0] * var6 / 524288.0) if humidity > _BME280_HUMIDITY_MAX: return _BME280_HUMIDITY_MAX if humidity < _BME280_HUMIDITY_MIN: return _BME280_HUMIDITY_MIN # else... return humidity
python
def humidity(self): """ The relative humidity in RH % returns None if humidity measurement is disabled """ self._read_temperature() hum = self._read_register(_BME280_REGISTER_HUMIDDATA, 2) #print("Humidity data: ", hum) adc = float(hum[0] << 8 | hum[1]) #print("adc:", adc) # Algorithm from the BME280 driver # https://github.com/BoschSensortec/BME280_driver/blob/master/bme280.c var1 = float(self._t_fine) - 76800.0 #print("var1 ", var1) var2 = (self._humidity_calib[3] * 64.0 + (self._humidity_calib[4] / 16384.0) * var1) #print("var2 ",var2) var3 = adc - var2 #print("var3 ",var3) var4 = self._humidity_calib[1] / 65536.0 #print("var4 ",var4) var5 = (1.0 + (self._humidity_calib[2] / 67108864.0) * var1) #print("var5 ",var5) var6 = 1.0 + (self._humidity_calib[5] / 67108864.0) * var1 * var5 #print("var6 ",var6) var6 = var3 * var4 * (var5 * var6) humidity = var6 * (1.0 - self._humidity_calib[0] * var6 / 524288.0) if humidity > _BME280_HUMIDITY_MAX: return _BME280_HUMIDITY_MAX if humidity < _BME280_HUMIDITY_MIN: return _BME280_HUMIDITY_MIN # else... return humidity
[ "def", "humidity", "(", "self", ")", ":", "self", ".", "_read_temperature", "(", ")", "hum", "=", "self", ".", "_read_register", "(", "_BME280_REGISTER_HUMIDDATA", ",", "2", ")", "#print(\"Humidity data: \", hum)", "adc", "=", "float", "(", "hum", "[", "0", "]", "<<", "8", "|", "hum", "[", "1", "]", ")", "#print(\"adc:\", adc)", "# Algorithm from the BME280 driver", "# https://github.com/BoschSensortec/BME280_driver/blob/master/bme280.c", "var1", "=", "float", "(", "self", ".", "_t_fine", ")", "-", "76800.0", "#print(\"var1 \", var1)", "var2", "=", "(", "self", ".", "_humidity_calib", "[", "3", "]", "*", "64.0", "+", "(", "self", ".", "_humidity_calib", "[", "4", "]", "/", "16384.0", ")", "*", "var1", ")", "#print(\"var2 \",var2)", "var3", "=", "adc", "-", "var2", "#print(\"var3 \",var3)", "var4", "=", "self", ".", "_humidity_calib", "[", "1", "]", "/", "65536.0", "#print(\"var4 \",var4)", "var5", "=", "(", "1.0", "+", "(", "self", ".", "_humidity_calib", "[", "2", "]", "/", "67108864.0", ")", "*", "var1", ")", "#print(\"var5 \",var5)", "var6", "=", "1.0", "+", "(", "self", ".", "_humidity_calib", "[", "5", "]", "/", "67108864.0", ")", "*", "var1", "*", "var5", "#print(\"var6 \",var6)", "var6", "=", "var3", "*", "var4", "*", "(", "var5", "*", "var6", ")", "humidity", "=", "var6", "*", "(", "1.0", "-", "self", ".", "_humidity_calib", "[", "0", "]", "*", "var6", "/", "524288.0", ")", "if", "humidity", ">", "_BME280_HUMIDITY_MAX", ":", "return", "_BME280_HUMIDITY_MAX", "if", "humidity", "<", "_BME280_HUMIDITY_MIN", ":", "return", "_BME280_HUMIDITY_MIN", "# else...", "return", "humidity" ]
The relative humidity in RH % returns None if humidity measurement is disabled
[ "The", "relative", "humidity", "in", "RH", "%", "returns", "None", "if", "humidity", "measurement", "is", "disabled" ]
febcd51983dc2bc3cd006bacaada505251c39af1
https://github.com/adafruit/Adafruit_CircuitPython_BME280/blob/febcd51983dc2bc3cd006bacaada505251c39af1/adafruit_bme280.py#L366-L399
train
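A minimal sketch of reading the compensated values over I2C with this driver; the Adafruit_BME280_I2C wrapper and the default 0x77 address follow the library's documented usage, and the board pins are whatever your platform exposes:

import board
import busio
import adafruit_bme280

i2c = busio.I2C(board.SCL, board.SDA)
bme280 = adafruit_bme280.Adafruit_BME280_I2C(i2c)   # address=0x77 unless SDO is pulled low

print('Temperature: %0.1f C' % bme280.temperature)
print('Humidity: %0.1f %%' % bme280.humidity)
print('Pressure: %0.1f hPa' % bme280.pressure)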
adafruit/Adafruit_CircuitPython_BME280
adafruit_bme280.py
Adafruit_BME280._read_coefficients
def _read_coefficients(self): """Read & save the calibration coefficients""" coeff = self._read_register(_BME280_REGISTER_DIG_T1, 24) coeff = list(struct.unpack('<HhhHhhhhhhhh', bytes(coeff))) coeff = [float(i) for i in coeff] self._temp_calib = coeff[:3] self._pressure_calib = coeff[3:] self._humidity_calib = [0]*6 self._humidity_calib[0] = self._read_byte(_BME280_REGISTER_DIG_H1) coeff = self._read_register(_BME280_REGISTER_DIG_H2, 7) coeff = list(struct.unpack('<hBBBBb', bytes(coeff))) self._humidity_calib[1] = float(coeff[0]) self._humidity_calib[2] = float(coeff[1]) self._humidity_calib[3] = float((coeff[2] << 4) | (coeff[3] & 0xF)) self._humidity_calib[4] = float((coeff[4] << 4) | (coeff[3] >> 4)) self._humidity_calib[5] = float(coeff[5])
python
def _read_coefficients(self): """Read & save the calibration coefficients""" coeff = self._read_register(_BME280_REGISTER_DIG_T1, 24) coeff = list(struct.unpack('<HhhHhhhhhhhh', bytes(coeff))) coeff = [float(i) for i in coeff] self._temp_calib = coeff[:3] self._pressure_calib = coeff[3:] self._humidity_calib = [0]*6 self._humidity_calib[0] = self._read_byte(_BME280_REGISTER_DIG_H1) coeff = self._read_register(_BME280_REGISTER_DIG_H2, 7) coeff = list(struct.unpack('<hBBBBb', bytes(coeff))) self._humidity_calib[1] = float(coeff[0]) self._humidity_calib[2] = float(coeff[1]) self._humidity_calib[3] = float((coeff[2] << 4) | (coeff[3] & 0xF)) self._humidity_calib[4] = float((coeff[4] << 4) | (coeff[3] >> 4)) self._humidity_calib[5] = float(coeff[5])
[ "def", "_read_coefficients", "(", "self", ")", ":", "coeff", "=", "self", ".", "_read_register", "(", "_BME280_REGISTER_DIG_T1", ",", "24", ")", "coeff", "=", "list", "(", "struct", ".", "unpack", "(", "'<HhhHhhhhhhhh'", ",", "bytes", "(", "coeff", ")", ")", ")", "coeff", "=", "[", "float", "(", "i", ")", "for", "i", "in", "coeff", "]", "self", ".", "_temp_calib", "=", "coeff", "[", ":", "3", "]", "self", ".", "_pressure_calib", "=", "coeff", "[", "3", ":", "]", "self", ".", "_humidity_calib", "=", "[", "0", "]", "*", "6", "self", ".", "_humidity_calib", "[", "0", "]", "=", "self", ".", "_read_byte", "(", "_BME280_REGISTER_DIG_H1", ")", "coeff", "=", "self", ".", "_read_register", "(", "_BME280_REGISTER_DIG_H2", ",", "7", ")", "coeff", "=", "list", "(", "struct", ".", "unpack", "(", "'<hBBBBb'", ",", "bytes", "(", "coeff", ")", ")", ")", "self", ".", "_humidity_calib", "[", "1", "]", "=", "float", "(", "coeff", "[", "0", "]", ")", "self", ".", "_humidity_calib", "[", "2", "]", "=", "float", "(", "coeff", "[", "1", "]", ")", "self", ".", "_humidity_calib", "[", "3", "]", "=", "float", "(", "(", "coeff", "[", "2", "]", "<<", "4", ")", "|", "(", "coeff", "[", "3", "]", "&", "0xF", ")", ")", "self", ".", "_humidity_calib", "[", "4", "]", "=", "float", "(", "(", "coeff", "[", "4", "]", "<<", "4", ")", "|", "(", "coeff", "[", "3", "]", ">>", "4", ")", ")", "self", ".", "_humidity_calib", "[", "5", "]", "=", "float", "(", "coeff", "[", "5", "]", ")" ]
Read & save the calibration coefficients
[ "Read", "&", "save", "the", "calibration", "coefficients" ]
febcd51983dc2bc3cd006bacaada505251c39af1
https://github.com/adafruit/Adafruit_CircuitPython_BME280/blob/febcd51983dc2bc3cd006bacaada505251c39af1/adafruit_bme280.py#L408-L424
train
adafruit/Adafruit_CircuitPython_BME280
adafruit_bme280.py
Adafruit_BME280._read24
def _read24(self, register): """Read an unsigned 24-bit value as a floating point and return it.""" ret = 0.0 for b in self._read_register(register, 3): ret *= 256.0 ret += float(b & 0xFF) return ret
python
def _read24(self, register): """Read an unsigned 24-bit value as a floating point and return it.""" ret = 0.0 for b in self._read_register(register, 3): ret *= 256.0 ret += float(b & 0xFF) return ret
[ "def", "_read24", "(", "self", ",", "register", ")", ":", "ret", "=", "0.0", "for", "b", "in", "self", ".", "_read_register", "(", "register", ",", "3", ")", ":", "ret", "*=", "256.0", "ret", "+=", "float", "(", "b", "&", "0xFF", ")", "return", "ret" ]
Read an unsigned 24-bit value as a floating point and return it.
[ "Read", "an", "unsigned", "24", "-", "bit", "value", "as", "a", "floating", "point", "and", "return", "it", "." ]
febcd51983dc2bc3cd006bacaada505251c39af1
https://github.com/adafruit/Adafruit_CircuitPython_BME280/blob/febcd51983dc2bc3cd006bacaada505251c39af1/adafruit_bme280.py#L430-L436
train
ArangoDB-Community/pyArango
pyArango/index.py
Index._create
def _create(self, postData) : """Creates an index of any type according to postData""" if self.infos is None : r = self.connection.session.post(self.indexesURL, params = {"collection" : self.collection.name}, data = json.dumps(postData, default=str)) data = r.json() if (r.status_code >= 400) or data['error'] : raise CreationError(data['errorMessage'], data) self.infos = data
python
def _create(self, postData) : """Creates an index of any type according to postData""" if self.infos is None : r = self.connection.session.post(self.indexesURL, params = {"collection" : self.collection.name}, data = json.dumps(postData, default=str)) data = r.json() if (r.status_code >= 400) or data['error'] : raise CreationError(data['errorMessage'], data) self.infos = data
[ "def", "_create", "(", "self", ",", "postData", ")", ":", "if", "self", ".", "infos", "is", "None", ":", "r", "=", "self", ".", "connection", ".", "session", ".", "post", "(", "self", ".", "indexesURL", ",", "params", "=", "{", "\"collection\"", ":", "self", ".", "collection", ".", "name", "}", ",", "data", "=", "json", ".", "dumps", "(", "postData", ",", "default", "=", "str", ")", ")", "data", "=", "r", ".", "json", "(", ")", "if", "(", "r", ".", "status_code", ">=", "400", ")", "or", "data", "[", "'error'", "]", ":", "raise", "CreationError", "(", "data", "[", "'errorMessage'", "]", ",", "data", ")", "self", ".", "infos", "=", "data" ]
Creates an index of any type according to postData
[ "Creates", "an", "index", "of", "any", "type", "according", "to", "postData" ]
dd72e5f6c540e5e148943d615ddf7553bb78ce0b
https://github.com/ArangoDB-Community/pyArango/blob/dd72e5f6c540e5e148943d615ddf7553bb78ce0b/pyArango/index.py#L22-L29
train
ArangoDB-Community/pyArango
pyArango/graph.py
Graph.createVertex
def createVertex(self, collectionName, docAttributes, waitForSync = False) : """adds a vertex to the graph and returns it""" url = "%s/vertex/%s" % (self.URL, collectionName) store = DOC.DocumentStore(self.database[collectionName], validators=self.database[collectionName]._fields, initDct=docAttributes) # self.database[collectionName].validateDct(docAttributes) store.validate() r = self.connection.session.post(url, data = json.dumps(docAttributes, default=str), params = {'waitForSync' : waitForSync}) data = r.json() if r.status_code == 201 or r.status_code == 202 : return self.database[collectionName][data["vertex"]["_key"]] raise CreationError("Unable to create vertice, %s" % data["errorMessage"], data)
python
def createVertex(self, collectionName, docAttributes, waitForSync = False) : """adds a vertex to the graph and returns it""" url = "%s/vertex/%s" % (self.URL, collectionName) store = DOC.DocumentStore(self.database[collectionName], validators=self.database[collectionName]._fields, initDct=docAttributes) # self.database[collectionName].validateDct(docAttributes) store.validate() r = self.connection.session.post(url, data = json.dumps(docAttributes, default=str), params = {'waitForSync' : waitForSync}) data = r.json() if r.status_code == 201 or r.status_code == 202 : return self.database[collectionName][data["vertex"]["_key"]] raise CreationError("Unable to create vertice, %s" % data["errorMessage"], data)
[ "def", "createVertex", "(", "self", ",", "collectionName", ",", "docAttributes", ",", "waitForSync", "=", "False", ")", ":", "url", "=", "\"%s/vertex/%s\"", "%", "(", "self", ".", "URL", ",", "collectionName", ")", "store", "=", "DOC", ".", "DocumentStore", "(", "self", ".", "database", "[", "collectionName", "]", ",", "validators", "=", "self", ".", "database", "[", "collectionName", "]", ".", "_fields", ",", "initDct", "=", "docAttributes", ")", "# self.database[collectionName].validateDct(docAttributes)", "store", ".", "validate", "(", ")", "r", "=", "self", ".", "connection", ".", "session", ".", "post", "(", "url", ",", "data", "=", "json", ".", "dumps", "(", "docAttributes", ",", "default", "=", "str", ")", ",", "params", "=", "{", "'waitForSync'", ":", "waitForSync", "}", ")", "data", "=", "r", ".", "json", "(", ")", "if", "r", ".", "status_code", "==", "201", "or", "r", ".", "status_code", "==", "202", ":", "return", "self", ".", "database", "[", "collectionName", "]", "[", "data", "[", "\"vertex\"", "]", "[", "\"_key\"", "]", "]", "raise", "CreationError", "(", "\"Unable to create vertice, %s\"", "%", "data", "[", "\"errorMessage\"", "]", ",", "data", ")" ]
adds a vertex to the graph and returns it
[ "adds", "a", "vertex", "to", "the", "graph", "and", "returns", "it" ]
dd72e5f6c540e5e148943d615ddf7553bb78ce0b
https://github.com/ArangoDB-Community/pyArango/blob/dd72e5f6c540e5e148943d615ddf7553bb78ce0b/pyArango/graph.py#L115-L129
train
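A hedged usage sketch for `createVertex`: everything below (graph name, collection classes, connection details) is invented for illustration and assumes a running ArangoDB instance; whether `createGraph` also creates missing collections may depend on the pyArango version.

from pyArango.connection import Connection
from pyArango.collection import Collection, Edges
from pyArango.graph import Graph, EdgeDefinition

# Hypothetical vertex collection, edge collection and graph definition.
class Person(Collection):
    _fields = {}

class Knows(Edges):
    _fields = {}

class Social(Graph):
    _edgeDefinitions = [EdgeDefinition("Knows", fromCollections=["Person"], toCollections=["Person"])]
    _orphanedCollections = []

conn = Connection(arangoURL="http://127.0.0.1:8529", username="root", password="secret")
db = conn["social_db"]             # assumed to already exist
social = db.createGraph("Social")  # attaches the graph declared above

# createVertex validates docAttributes against Person._fields, POSTs to .../vertex/Person
# and returns the stored document when the server answers 201/202.
alice = social.createVertex("Person", {"name": "Alice"})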
ArangoDB-Community/pyArango
pyArango/graph.py
Graph.deleteVertex
def deleteVertex(self, document, waitForSync = False) :
    """deletes a vertex from the graph as well as all linked edges"""
    url = "%s/vertex/%s" % (self.URL, document._id)
    r = self.connection.session.delete(url, params = {'waitForSync' : waitForSync})
    data = r.json()
    if r.status_code == 200 or r.status_code == 202 :
        return True
    raise DeletionError("Unable to delete vertice, %s" % document._id, data)
python
def deleteVertex(self, document, waitForSync = False) :
    """deletes a vertex from the graph as well as all linked edges"""
    url = "%s/vertex/%s" % (self.URL, document._id)
    r = self.connection.session.delete(url, params = {'waitForSync' : waitForSync})
    data = r.json()
    if r.status_code == 200 or r.status_code == 202 :
        return True
    raise DeletionError("Unable to delete vertice, %s" % document._id, data)
[ "def", "deleteVertex", "(", "self", ",", "document", ",", "waitForSync", "=", "False", ")", ":", "url", "=", "\"%s/vertex/%s\"", "%", "(", "self", ".", "URL", ",", "document", ".", "_id", ")", "r", "=", "self", ".", "connection", ".", "session", ".", "delete", "(", "url", ",", "params", "=", "{", "'waitForSync'", ":", "waitForSync", "}", ")", "data", "=", "r", ".", "json", "(", ")", "if", "r", ".", "status_code", "==", "200", "or", "r", ".", "status_code", "==", "202", ":", "return", "True", "raise", "DeletionError", "(", "\"Unable to delete vertice, %s\"", "%", "document", ".", "_id", ",", "data", ")" ]
deletes a vertex from the graph as well as all linked edges
[ "deletes", "a", "vertex", "from", "the", "graph", "as", "well", "as", "al", "linked", "edges" ]
dd72e5f6c540e5e148943d615ddf7553bb78ce0b
https://github.com/ArangoDB-Community/pyArango/blob/dd72e5f6c540e5e148943d615ddf7553bb78ce0b/pyArango/graph.py#L131-L140
train
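Continuing the hypothetical `Social` graph from the sketch above, deleting through the graph (rather than directly through the collection) is what removes the incident edges as well:

# `social` and `alice` come from the previous sketch.
social.deleteVertex(alice)   # DELETE .../vertex/<_id>; returns True on 200/202, raises DeletionError otherwise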
ArangoDB-Community/pyArango
pyArango/graph.py
Graph.createEdge
def createEdge(self, collectionName, _fromId, _toId, edgeAttributes, waitForSync = False) :
    """creates an edge between two documents"""
    if not _fromId :
        raise ValueError("Invalid _fromId: %s" % _fromId)
    if not _toId :
        raise ValueError("Invalid _toId: %s" % _toId)
    if collectionName not in self.definitions :
        raise KeyError("'%s' is not among the edge definitions" % collectionName)
    url = "%s/edge/%s" % (self.URL, collectionName)
    self.database[collectionName].validatePrivate("_from", _fromId)
    self.database[collectionName].validatePrivate("_to", _toId)
    ed = self.database[collectionName].createEdge()
    ed.set(edgeAttributes)
    ed.validate()
    payload = ed.getStore()
    payload.update({'_from' : _fromId, '_to' : _toId})
    r = self.connection.session.post(url, data = json.dumps(payload, default=str), params = {'waitForSync' : waitForSync})
    data = r.json()
    if r.status_code == 201 or r.status_code == 202 :
        return self.database[collectionName][data["edge"]["_key"]]
    # print "\ngraph 160, ", data, payload, _fromId
    raise CreationError("Unable to create edge, %s" % r.json()["errorMessage"], data)
python
def createEdge(self, collectionName, _fromId, _toId, edgeAttributes, waitForSync = False) :
    """creates an edge between two documents"""
    if not _fromId :
        raise ValueError("Invalid _fromId: %s" % _fromId)
    if not _toId :
        raise ValueError("Invalid _toId: %s" % _toId)
    if collectionName not in self.definitions :
        raise KeyError("'%s' is not among the edge definitions" % collectionName)
    url = "%s/edge/%s" % (self.URL, collectionName)
    self.database[collectionName].validatePrivate("_from", _fromId)
    self.database[collectionName].validatePrivate("_to", _toId)
    ed = self.database[collectionName].createEdge()
    ed.set(edgeAttributes)
    ed.validate()
    payload = ed.getStore()
    payload.update({'_from' : _fromId, '_to' : _toId})
    r = self.connection.session.post(url, data = json.dumps(payload, default=str), params = {'waitForSync' : waitForSync})
    data = r.json()
    if r.status_code == 201 or r.status_code == 202 :
        return self.database[collectionName][data["edge"]["_key"]]
    # print "\ngraph 160, ", data, payload, _fromId
    raise CreationError("Unable to create edge, %s" % r.json()["errorMessage"], data)
[ "def", "createEdge", "(", "self", ",", "collectionName", ",", "_fromId", ",", "_toId", ",", "edgeAttributes", ",", "waitForSync", "=", "False", ")", ":", "if", "not", "_fromId", ":", "raise", "ValueError", "(", "\"Invalid _fromId: %s\"", "%", "_fromId", ")", "if", "not", "_toId", ":", "raise", "ValueError", "(", "\"Invalid _toId: %s\"", "%", "_toId", ")", "if", "collectionName", "not", "in", "self", ".", "definitions", ":", "raise", "KeyError", "(", "\"'%s' is not among the edge definitions\"", "%", "collectionName", ")", "url", "=", "\"%s/edge/%s\"", "%", "(", "self", ".", "URL", ",", "collectionName", ")", "self", ".", "database", "[", "collectionName", "]", ".", "validatePrivate", "(", "\"_from\"", ",", "_fromId", ")", "self", ".", "database", "[", "collectionName", "]", ".", "validatePrivate", "(", "\"_to\"", ",", "_toId", ")", "ed", "=", "self", ".", "database", "[", "collectionName", "]", ".", "createEdge", "(", ")", "ed", ".", "set", "(", "edgeAttributes", ")", "ed", ".", "validate", "(", ")", "payload", "=", "ed", ".", "getStore", "(", ")", "payload", ".", "update", "(", "{", "'_from'", ":", "_fromId", ",", "'_to'", ":", "_toId", "}", ")", "r", "=", "self", ".", "connection", ".", "session", ".", "post", "(", "url", ",", "data", "=", "json", ".", "dumps", "(", "payload", ",", "default", "=", "str", ")", ",", "params", "=", "{", "'waitForSync'", ":", "waitForSync", "}", ")", "data", "=", "r", ".", "json", "(", ")", "if", "r", ".", "status_code", "==", "201", "or", "r", ".", "status_code", "==", "202", ":", "return", "self", ".", "database", "[", "collectionName", "]", "[", "data", "[", "\"edge\"", "]", "[", "\"_key\"", "]", "]", "# print \"\\ngraph 160, \", data, payload, _fromId", "raise", "CreationError", "(", "\"Unable to create edge, %s\"", "%", "r", ".", "json", "(", ")", "[", "\"errorMessage\"", "]", ",", "data", ")" ]
creates an edge between two documents
[ "creates", "an", "edge", "between", "two", "documents" ]
dd72e5f6c540e5e148943d615ddf7553bb78ce0b
https://github.com/ArangoDB-Community/pyArango/blob/dd72e5f6c540e5e148943d615ddf7553bb78ce0b/pyArango/graph.py#L142-L170
train
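A short sketch of `createEdge` against the same hypothetical graph; it expects full document ids ("Person/<key>"), and the named edge collection must be listed in the graph's edge definitions or a KeyError is raised:

bob = social.createVertex("Person", {"name": "Bob"})
carol = social.createVertex("Person", {"name": "Carol"})

# edgeAttributes are validated against the Knows collection before the POST to .../edge/Knows.
knows = social.createEdge("Knows", bob._id, carol._id, {"since": 2021})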
ArangoDB-Community/pyArango
pyArango/graph.py
Graph.link
def link(self, definition, doc1, doc2, edgeAttributes, waitForSync = False) :
    "A shorthand for createEdge that takes two documents as input"
    if type(doc1) is DOC.Document :
        if not doc1._id :
            doc1.save()
        doc1_id = doc1._id
    else :
        doc1_id = doc1
    if type(doc2) is DOC.Document :
        if not doc2._id :
            doc2.save()
        doc2_id = doc2._id
    else :
        doc2_id = doc2
    return self.createEdge(definition, doc1_id, doc2_id, edgeAttributes, waitForSync)
python
def link(self, definition, doc1, doc2, edgeAttributes, waitForSync = False) :
    "A shorthand for createEdge that takes two documents as input"
    if type(doc1) is DOC.Document :
        if not doc1._id :
            doc1.save()
        doc1_id = doc1._id
    else :
        doc1_id = doc1
    if type(doc2) is DOC.Document :
        if not doc2._id :
            doc2.save()
        doc2_id = doc2._id
    else :
        doc2_id = doc2
    return self.createEdge(definition, doc1_id, doc2_id, edgeAttributes, waitForSync)
[ "def", "link", "(", "self", ",", "definition", ",", "doc1", ",", "doc2", ",", "edgeAttributes", ",", "waitForSync", "=", "False", ")", ":", "if", "type", "(", "doc1", ")", "is", "DOC", ".", "Document", ":", "if", "not", "doc1", ".", "_id", ":", "doc1", ".", "save", "(", ")", "doc1_id", "=", "doc1", ".", "_id", "else", ":", "doc1_id", "=", "doc1", "if", "type", "(", "doc2", ")", "is", "DOC", ".", "Document", ":", "if", "not", "doc2", ".", "_id", ":", "doc2", ".", "save", "(", ")", "doc2_id", "=", "doc2", ".", "_id", "else", ":", "doc2_id", "=", "doc2", "return", "self", ".", "createEdge", "(", "definition", ",", "doc1_id", ",", "doc2_id", ",", "edgeAttributes", ",", "waitForSync", ")" ]
A shorthand for createEdge that takes two documents as input
[ "A", "shorthand", "for", "createEdge", "that", "takes", "two", "documents", "as", "input" ]
dd72e5f6c540e5e148943d615ddf7553bb78ce0b
https://github.com/ArangoDB-Community/pyArango/blob/dd72e5f6c540e5e148943d615ddf7553bb78ce0b/pyArango/graph.py#L172-L188
train
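`link` is the convenience form of the same operation: it accepts the documents themselves, saves any that have no `_id` yet, and forwards to `createEdge`. Continuing the hypothetical setup:

dave = social.createVertex("Person", {"name": "Dave"})
# Equivalent to social.createEdge("Knows", carol._id, dave._id, {"since": 2022}).
social.link("Knows", carol, dave, {"since": 2022})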
ArangoDB-Community/pyArango
pyArango/graph.py
Graph.unlink
def unlink(self, definition, doc1, doc2) :
    "deletes all links between doc1 and doc2"
    links = self.database[definition].fetchByExample( {"_from": doc1._id,"_to" : doc2._id}, batchSize = 100)
    for l in links :
        self.deleteEdge(l)
python
def unlink(self, definition, doc1, doc2) :
    "deletes all links between doc1 and doc2"
    links = self.database[definition].fetchByExample( {"_from": doc1._id,"_to" : doc2._id}, batchSize = 100)
    for l in links :
        self.deleteEdge(l)
[ "def", "unlink", "(", "self", ",", "definition", ",", "doc1", ",", "doc2", ")", ":", "links", "=", "self", ".", "database", "[", "definition", "]", ".", "fetchByExample", "(", "{", "\"_from\"", ":", "doc1", ".", "_id", ",", "\"_to\"", ":", "doc2", ".", "_id", "}", ",", "batchSize", "=", "100", ")", "for", "l", "in", "links", ":", "self", ".", "deleteEdge", "(", "l", ")" ]
deletes all links between doc1 and doc2
[ "deletes", "all", "links", "between", "doc1", "and", "doc2" ]
dd72e5f6c540e5e148943d615ddf7553bb78ce0b
https://github.com/ArangoDB-Community/pyArango/blob/dd72e5f6c540e5e148943d615ddf7553bb78ce0b/pyArango/graph.py#L190-L194
train
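`unlink` closes the loop on the hypothetical example: it fetches every edge in the given definition going from `doc1` to `doc2` (only that direction) and deletes each one through the graph:

# Removes all carol -> dave edges from the Knows collection; dave -> carol edges are untouched.
social.unlink("Knows", carol, dave)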