repository_name
stringlengths
5
67
func_path_in_repository
stringlengths
4
234
func_name
stringlengths
0
314
whole_func_string
stringlengths
52
3.87M
language
stringclasses
6 values
func_code_string
stringlengths
52
3.87M
func_documentation_string
stringlengths
1
47.2k
func_code_url
stringlengths
85
339
progrium/skypipe
skypipe/client.py
sp_msg
def sp_msg(cmd, pipe=None, data=None):
    """Build a skypipe protocol multipart message.

    Always starts with the protocol header and the command frame; the
    optional pipe name and payload frames are appended only when given.
    """
    frames = [SP_HEADER, cmd]
    for optional_frame in (pipe, data):
        if optional_frame is not None:
            frames.append(optional_frame)
    return frames
python
def sp_msg(cmd, pipe=None, data=None): """Produces skypipe protocol multipart message""" msg = [SP_HEADER, cmd] if pipe is not None: msg.append(pipe) if data is not None: msg.append(data) return msg
Produces skypipe protocol multipart message
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/client.py#L42-L49
progrium/skypipe
skypipe/client.py
check_skypipe_endpoint
def check_skypipe_endpoint(endpoint, timeout=10):
    """Skypipe endpoint checker -- pings endpoint.

    :param endpoint: ZeroMQ endpoint string to connect to.
    :param timeout: Seconds to poll for a reply before giving up.
    :returns: True if endpoint replies with a valid header,
        False if endpoint replies with an invalid header,
        None if endpoint does not reply within timeout.
    """
    socket = ctx.socket(zmq.DEALER)
    socket.linger = 0
    socket.connect(endpoint)
    socket.send_multipart(sp_msg(SP_CMD_HELLO))
    # Initialize before the loop: in the original, `reply` was only
    # assigned inside the loop body, so a non-positive timeout (loop
    # never entered) would hit a NameError at the `if reply:` check.
    reply = None
    timeout_time = time.time() + timeout
    while time.time() < timeout_time:
        try:
            reply = socket.recv_multipart(zmq.NOBLOCK)
            break
        except zmq.ZMQError:
            # No message yet; back off briefly and poll again.
            time.sleep(0.1)
    socket.close()
    if reply:
        return str(reply.pop(0)) == SP_HEADER
python
def check_skypipe_endpoint(endpoint, timeout=10): """Skypipe endpoint checker -- pings endpoint Returns True if endpoint replies with valid header, Returns False if endpoint replies with invalid header, Returns None if endpoint does not reply within timeout """ socket = ctx.socket(zmq.DEALER) socket.linger = 0 socket.connect(endpoint) socket.send_multipart(sp_msg(SP_CMD_HELLO)) timeout_time = time.time() + timeout while time.time() < timeout_time: reply = None try: reply = socket.recv_multipart(zmq.NOBLOCK) break except zmq.ZMQError: time.sleep(0.1) socket.close() if reply: return str(reply.pop(0)) == SP_HEADER
Skypipe endpoint checker -- pings endpoint Returns True if endpoint replies with valid header, Returns False if endpoint replies with invalid header, Returns None if endpoint does not reply within timeout
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/client.py#L51-L72
progrium/skypipe
skypipe/client.py
stream_skypipe_output
def stream_skypipe_output(endpoint, name=None):
    """Generator for reading skypipe data.

    Subscribes to the named pipe on *endpoint* and yields each data
    frame as it arrives; always unsubscribes and closes the socket on
    exit.
    """
    name = name or ''
    socket = ctx.socket(zmq.DEALER)
    socket.connect(endpoint)
    try:
        socket.send_multipart(sp_msg(SP_CMD_LISTEN, name))
        while True:
            msg = socket.recv_multipart()
            try:
                data = parse_skypipe_data_stream(msg, name)
                if data:
                    yield data
            except EOFError:
                # PEP 479: raising StopIteration inside a generator is a
                # RuntimeError on Python 3.7+; a plain return ends the
                # generator cleanly on both Python 2 and 3.
                return
    finally:
        socket.send_multipart(sp_msg(SP_CMD_UNLISTEN, name))
        socket.close()
python
def stream_skypipe_output(endpoint, name=None): """Generator for reading skypipe data""" name = name or '' socket = ctx.socket(zmq.DEALER) socket.connect(endpoint) try: socket.send_multipart(sp_msg(SP_CMD_LISTEN, name)) while True: msg = socket.recv_multipart() try: data = parse_skypipe_data_stream(msg, name) if data: yield data except EOFError: raise StopIteration() finally: socket.send_multipart(sp_msg(SP_CMD_UNLISTEN, name)) socket.close()
Generator for reading skypipe data
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/client.py#L75-L94
progrium/skypipe
skypipe/client.py
parse_skypipe_data_stream
def parse_skypipe_data_stream(msg, for_pipe):
    """Extract payload data from a skypipe multipart message.

    Returns the data frame for a DATA message addressed to *for_pipe*,
    returns None for any non-matching message, and raises EOFError when
    the end-of-stream marker is received.
    """
    header = str(msg.pop(0))
    command = str(msg.pop(0))
    pipe_name = str(msg.pop(0))
    data = str(msg.pop(0))

    not_for_us = (
        header != SP_HEADER
        or pipe_name != for_pipe
        or command != SP_CMD_DATA
    )
    if not_for_us:
        return None

    if data == SP_DATA_EOF:
        raise EOFError()
    return data
python
def parse_skypipe_data_stream(msg, for_pipe): """May return data from skypipe message or raises EOFError""" header = str(msg.pop(0)) command = str(msg.pop(0)) pipe_name = str(msg.pop(0)) data = str(msg.pop(0)) if header != SP_HEADER: return if pipe_name != for_pipe: return if command != SP_CMD_DATA: return if data == SP_DATA_EOF: raise EOFError() else: return data
May return data from skypipe message or raises EOFError
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/client.py#L96-L108
progrium/skypipe
skypipe/client.py
skypipe_input_stream
def skypipe_input_stream(endpoint, name=None):
    """Returns a context manager for streaming data into skypipe."""
    name = name or ''

    class _InputStream(object):
        """Owns a DEALER socket for the lifetime of the with-block."""

        def __enter__(self):
            self.socket = ctx.socket(zmq.DEALER)
            self.socket.connect(endpoint)
            return self

        def send(self, data):
            # Forward one payload frame to the named pipe.
            self.socket.send_multipart(sp_msg(SP_CMD_DATA, name, data))

        def __exit__(self, *args, **kwargs):
            # Signal end-of-stream, then release the socket.
            self.socket.send_multipart(sp_msg(SP_CMD_DATA, name, SP_DATA_EOF))
            self.socket.close()

    return _InputStream()
python
def skypipe_input_stream(endpoint, name=None): """Returns a context manager for streaming data into skypipe""" name = name or '' class context_manager(object): def __enter__(self): self.socket = ctx.socket(zmq.DEALER) self.socket.connect(endpoint) return self def send(self, data): data_msg = sp_msg(SP_CMD_DATA, name, data) self.socket.send_multipart(data_msg) def __exit__(self, *args, **kwargs): eof_msg = sp_msg(SP_CMD_DATA, name, SP_DATA_EOF) self.socket.send_multipart(eof_msg) self.socket.close() return context_manager()
Returns a context manager for streaming data into skypipe
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/client.py#L110-L128
progrium/skypipe
skypipe/client.py
stream_stdin_lines
def stream_stdin_lines():
    """Generator for unbuffered line reading from STDIN.

    Yields each line (as bytes) as soon as it arrives, which matters
    when data is being piped in interactively.
    """
    # Python 3 forbids unbuffered text-mode streams, so the original
    # os.fdopen(fd, 'r', 0) raises ValueError there.  Reading raw bytes
    # unbuffered works on both: Python 2's 'r' mode yielded the same
    # byte strings, and downstream zmq sends want bytes anyway.
    stdin = os.fdopen(sys.stdin.fileno(), 'rb', 0)
    while True:
        line = stdin.readline()
        if not line:
            break
        yield line
python
def stream_stdin_lines(): """Generator for unbuffered line reading from STDIN""" stdin = os.fdopen(sys.stdin.fileno(), 'r', 0) while True: line = stdin.readline() if line: yield line else: break
Generator for unbuffered line reading from STDIN
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/client.py#L130-L138
progrium/skypipe
skypipe/client.py
run
def run(endpoint, name=None):
    """Runs the skypipe client.

    When stdin is a terminal the client acts as a reader, printing pipe
    data to stdout; when stdin is piped, it streams stdin lines into
    the pipe instead.  Ctrl-C exits quietly.
    """
    try:
        if not os.isatty(0):
            # input mode: stdin is a pipe, feed it into skypipe
            with skypipe_input_stream(endpoint, name) as stream:
                for line in stream_stdin_lines():
                    stream.send(line)
        else:
            # output mode: print everything the pipe delivers
            for data in stream_skypipe_output(endpoint, name):
                sys.stdout.write(data)
                sys.stdout.flush()
    except KeyboardInterrupt:
        pass
python
def run(endpoint, name=None): """Runs the skypipe client""" try: if os.isatty(0): # output mode for data in stream_skypipe_output(endpoint, name): sys.stdout.write(data) sys.stdout.flush() else: # input mode with skypipe_input_stream(endpoint, name) as stream: for line in stream_stdin_lines(): stream.send(line) except KeyboardInterrupt: pass
Runs the skypipe client
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/client.py#L140-L156
thombashi/DateTimeRange
datetimerange/__init__.py
DateTimeRange.validate_time_inversion
def validate_time_inversion(self):
    """
    Check time inversion of the time range.

    :raises ValueError:
        If |attr_start_datetime| is bigger than |attr_end_datetime|.
    :raises TypeError:
        If either |attr_start_datetime| or |attr_end_datetime| (or both)
        is an inappropriate datetime value.
    """
    if not self.is_set():
        # for python2/3 compatibility
        raise TypeError

    if self.start_datetime > self.end_datetime:
        message = "time inversion found: {:s} > {:s}".format(
            str(self.start_datetime), str(self.end_datetime)
        )
        raise ValueError(message)
python
def validate_time_inversion(self): """ Check time inversion of the time range. :raises ValueError: If |attr_start_datetime| is bigger than |attr_end_datetime|. :raises TypeError: Any one of |attr_start_datetime| and |attr_end_datetime|, or both is inappropriate datetime value. :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange("2015-03-22T10:10:00+0900", "2015-03-22T10:00:00+0900") try: time_range.validate_time_inversion() except ValueError: print "time inversion" :Output: .. parsed-literal:: time inversion """ if not self.is_set(): # for python2/3 compatibility raise TypeError if self.start_datetime > self.end_datetime: raise ValueError( "time inversion found: {:s} > {:s}".format( str(self.start_datetime), str(self.end_datetime) ) )
Check time inversion of the time range. :raises ValueError: If |attr_start_datetime| is bigger than |attr_end_datetime|. :raises TypeError: Any one of |attr_start_datetime| and |attr_end_datetime|, or both is inappropriate datetime value. :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange("2015-03-22T10:10:00+0900", "2015-03-22T10:00:00+0900") try: time_range.validate_time_inversion() except ValueError: print "time inversion" :Output: .. parsed-literal:: time inversion
https://github.com/thombashi/DateTimeRange/blob/542a3b69ec256d28cc5d5469fd68207c1b509c9c/datetimerange/__init__.py#L239-L274
thombashi/DateTimeRange
datetimerange/__init__.py
DateTimeRange.get_start_time_str
def get_start_time_str(self):
    """
    :return:
        |attr_start_datetime| formatted with |attr_start_time_format|,
        or |NaT| when the value or format is invalid.
    :rtype: str
    """
    try:
        return self.start_datetime.strftime(self.start_time_format)
    except AttributeError:
        # start_datetime is None (or otherwise lacks strftime)
        return self.NOT_A_TIME_STR
python
def get_start_time_str(self): """ :return: |attr_start_datetime| as |str| formatted with |attr_start_time_format|. Return |NaT| if the invalid value or the invalid format. :rtype: str :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange("2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") print(time_range.get_start_time_str()) time_range.start_time_format = "%Y/%m/%d %H:%M:%S" print(time_range.get_start_time_str()) :Output: .. parsed-literal:: 2015-03-22T10:00:00+0900 2015/03/22 10:00:00 """ try: return self.start_datetime.strftime(self.start_time_format) except AttributeError: return self.NOT_A_TIME_STR
:return: |attr_start_datetime| as |str| formatted with |attr_start_time_format|. Return |NaT| if the invalid value or the invalid format. :rtype: str :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange("2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") print(time_range.get_start_time_str()) time_range.start_time_format = "%Y/%m/%d %H:%M:%S" print(time_range.get_start_time_str()) :Output: .. parsed-literal:: 2015-03-22T10:00:00+0900 2015/03/22 10:00:00
https://github.com/thombashi/DateTimeRange/blob/542a3b69ec256d28cc5d5469fd68207c1b509c9c/datetimerange/__init__.py#L333-L359
thombashi/DateTimeRange
datetimerange/__init__.py
DateTimeRange.get_end_time_str
def get_end_time_str(self):
    """
    :return:
        |attr_end_datetime| formatted with |attr_end_time_format|,
        or |NaT| when the value or format is invalid.
    :rtype: str
    """
    try:
        return self.end_datetime.strftime(self.end_time_format)
    except AttributeError:
        # end_datetime is None (or otherwise lacks strftime)
        return self.NOT_A_TIME_STR
python
def get_end_time_str(self): """ :return: |attr_end_datetime| as a |str| formatted with |attr_end_time_format|. Return |NaT| if invalid datetime or format. :rtype: str :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange("2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") print(time_range.get_end_time_str()) time_range.end_time_format = "%Y/%m/%d %H:%M:%S" print(time_range.get_end_time_str()) :Output: .. parsed-literal:: 2015-03-22T10:10:00+0900 2015/03/22 10:10:00 """ try: return self.end_datetime.strftime(self.end_time_format) except AttributeError: return self.NOT_A_TIME_STR
:return: |attr_end_datetime| as a |str| formatted with |attr_end_time_format|. Return |NaT| if invalid datetime or format. :rtype: str :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange("2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") print(time_range.get_end_time_str()) time_range.end_time_format = "%Y/%m/%d %H:%M:%S" print(time_range.get_end_time_str()) :Output: .. parsed-literal:: 2015-03-22T10:10:00+0900 2015/03/22 10:10:00
https://github.com/thombashi/DateTimeRange/blob/542a3b69ec256d28cc5d5469fd68207c1b509c9c/datetimerange/__init__.py#L361-L387
thombashi/DateTimeRange
datetimerange/__init__.py
DateTimeRange.set_start_datetime
def set_start_datetime(self, value, timezone=None):
    """
    Set the start time of the time range.

    :param value: |param_start_datetime|
    :type value: |datetime|/|str|
    :param timezone: Timezone applied during conversion.
    :raises ValueError: If the value is invalid as a |datetime| value.
    """
    if value is None:
        self.__start_datetime = None
        return

    try:
        converter = typepy.type.DateTime(
            value, strict_level=typepy.StrictLevel.MIN, timezone=timezone
        )
        self.__start_datetime = converter.convert()
    except typepy.TypeConversionError as e:
        raise ValueError(e)
python
def set_start_datetime(self, value, timezone=None): """ Set the start time of the time range. :param value: |param_start_datetime| :type value: |datetime|/|str| :raises ValueError: If the value is invalid as a |datetime| value. :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange() print(time_range) time_range.set_start_datetime("2015-03-22T10:00:00+0900") print(time_range) :Output: .. parsed-literal:: NaT - NaT 2015-03-22T10:00:00+0900 - NaT """ if value is None: self.__start_datetime = None return try: self.__start_datetime = typepy.type.DateTime( value, strict_level=typepy.StrictLevel.MIN, timezone=timezone ).convert() except typepy.TypeConversionError as e: raise ValueError(e)
Set the start time of the time range. :param value: |param_start_datetime| :type value: |datetime|/|str| :raises ValueError: If the value is invalid as a |datetime| value. :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange() print(time_range) time_range.set_start_datetime("2015-03-22T10:00:00+0900") print(time_range) :Output: .. parsed-literal:: NaT - NaT 2015-03-22T10:00:00+0900 - NaT
https://github.com/thombashi/DateTimeRange/blob/542a3b69ec256d28cc5d5469fd68207c1b509c9c/datetimerange/__init__.py#L408-L440
thombashi/DateTimeRange
datetimerange/__init__.py
DateTimeRange.set_end_datetime
def set_end_datetime(self, value, timezone=None):
    """
    Set the end time of the time range.

    :param datetime.datetime/str value: |param_end_datetime|
    :param timezone: Timezone applied during conversion.
    :raises ValueError: If the value is invalid as a |datetime| value.
    """
    if value is None:
        self.__end_datetime = None
        return

    try:
        converter = typepy.type.DateTime(
            value, strict_level=typepy.StrictLevel.MIN, timezone=timezone
        )
        self.__end_datetime = converter.convert()
    except typepy.TypeConversionError as e:
        raise ValueError(e)
python
def set_end_datetime(self, value, timezone=None): """ Set the end time of the time range. :param datetime.datetime/str value: |param_end_datetime| :raises ValueError: If the value is invalid as a |datetime| value. :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange() print(time_range) time_range.set_end_datetime("2015-03-22T10:10:00+0900") print(time_range) :Output: .. parsed-literal:: NaT - NaT NaT - 2015-03-22T10:10:00+0900 """ if value is None: self.__end_datetime = None return try: self.__end_datetime = typepy.type.DateTime( value, strict_level=typepy.StrictLevel.MIN, timezone=timezone ).convert() except typepy.TypeConversionError as e: raise ValueError(e)
Set the end time of the time range. :param datetime.datetime/str value: |param_end_datetime| :raises ValueError: If the value is invalid as a |datetime| value. :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange() print(time_range) time_range.set_end_datetime("2015-03-22T10:10:00+0900") print(time_range) :Output: .. parsed-literal:: NaT - NaT NaT - 2015-03-22T10:10:00+0900
https://github.com/thombashi/DateTimeRange/blob/542a3b69ec256d28cc5d5469fd68207c1b509c9c/datetimerange/__init__.py#L442-L473
thombashi/DateTimeRange
datetimerange/__init__.py
DateTimeRange.set_time_range
def set_time_range(self, start, end):
    """
    Set both ends of the time range in one call.

    :param datetime.datetime/str start: |param_start_datetime|
    :param datetime.datetime/str end: |param_end_datetime|
    """
    # Delegate to the single-end setters so their validation applies.
    self.set_start_datetime(start)
    self.set_end_datetime(end)
python
def set_time_range(self, start, end): """ :param datetime.datetime/str start: |param_start_datetime| :param datetime.datetime/str end: |param_end_datetime| :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange() print(time_range) time_range.set_time_range("2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") print(time_range) :Output: .. parsed-literal:: NaT - NaT 2015-03-22T10:00:00+0900 - 2015-03-22T10:10:00+0900 """ self.set_start_datetime(start) self.set_end_datetime(end)
:param datetime.datetime/str start: |param_start_datetime| :param datetime.datetime/str end: |param_end_datetime| :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange() print(time_range) time_range.set_time_range("2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") print(time_range) :Output: .. parsed-literal:: NaT - NaT 2015-03-22T10:00:00+0900 - 2015-03-22T10:10:00+0900
https://github.com/thombashi/DateTimeRange/blob/542a3b69ec256d28cc5d5469fd68207c1b509c9c/datetimerange/__init__.py#L475-L496
thombashi/DateTimeRange
datetimerange/__init__.py
DateTimeRange.range
def range(self, step):
    """
    Iterate over the datetimes inside the range, *step* apart.

    :param step: Step of iteration; must be nonzero and its sign must
        match the direction of the range.
    :type step: |timedelta|/dateutil.relativedelta.relativedelta
    :return: iterator
    :rtype: iterator
    :raises ValueError: If step is zero or points the wrong way.
    """
    if self.__compare_timedelta(step, 0) == 0:
        raise ValueError("step must be not zero")

    try:
        self.validate_time_inversion()
        is_inversion = False
    except ValueError:
        is_inversion = True

    step_sign = self.__compare_timedelta(step, seconds=0)
    if not is_inversion and step_sign < 0:
        raise ValueError("invalid step: expect greater than 0, actual={}".format(step))
    if is_inversion and step_sign > 0:
        raise ValueError("invalid step: expect less than 0, actual={}".format(step))

    current_datetime = self.start_datetime
    while current_datetime <= self.end_datetime:
        yield current_datetime
        current_datetime = current_datetime + step
python
def range(self, step): """ Return an iterator object. :param step: Step of iteration. :type step: |timedelta|/dateutil.relativedelta.relativedelta :return: iterator :rtype: iterator :Sample Code: .. code:: python import datetime from datetimerange import DateTimeRange time_range = DateTimeRange("2015-01-01T00:00:00+0900", "2015-01-04T00:00:00+0900") for value in time_range.range(datetime.timedelta(days=1)): print(value) :Output: .. parsed-literal:: 2015-01-01 00:00:00+09:00 2015-01-02 00:00:00+09:00 2015-01-03 00:00:00+09:00 2015-01-04 00:00:00+09:00 """ if self.__compare_timedelta(step, 0) == 0: raise ValueError("step must be not zero") is_inversion = False try: self.validate_time_inversion() except ValueError: is_inversion = True if not is_inversion: if self.__compare_timedelta(step, seconds=0) < 0: raise ValueError("invalid step: expect greater than 0, actual={}".format(step)) else: if self.__compare_timedelta(step, seconds=0) > 0: raise ValueError("invalid step: expect less than 0, actual={}".format(step)) current_datetime = self.start_datetime while current_datetime <= self.end_datetime: yield current_datetime current_datetime = current_datetime + step
Return an iterator object. :param step: Step of iteration. :type step: |timedelta|/dateutil.relativedelta.relativedelta :return: iterator :rtype: iterator :Sample Code: .. code:: python import datetime from datetimerange import DateTimeRange time_range = DateTimeRange("2015-01-01T00:00:00+0900", "2015-01-04T00:00:00+0900") for value in time_range.range(datetime.timedelta(days=1)): print(value) :Output: .. parsed-literal:: 2015-01-01 00:00:00+09:00 2015-01-02 00:00:00+09:00 2015-01-03 00:00:00+09:00 2015-01-04 00:00:00+09:00
https://github.com/thombashi/DateTimeRange/blob/542a3b69ec256d28cc5d5469fd68207c1b509c9c/datetimerange/__init__.py#L552-L598
thombashi/DateTimeRange
datetimerange/__init__.py
DateTimeRange.intersection
def intersection(self, x):
    """
    Build a new DateTimeRange covering the overlap of *x* and this range.

    :param DateTimeRange x: Value to compute intersection with the
        current time range.
    :return: New range; both ends are None when the ranges do not
        overlap.
    """
    self.validate_time_inversion()
    x.validate_time_inversion()

    overlaps = any([x.start_datetime in self, self.start_datetime in x])
    if overlaps:
        start_datetime = max(self.start_datetime, x.start_datetime)
        end_datetime = min(self.end_datetime, x.end_datetime)
    else:
        start_datetime = end_datetime = None

    return DateTimeRange(
        start_datetime=start_datetime,
        end_datetime=end_datetime,
        start_time_format=self.start_time_format,
        end_time_format=self.end_time_format,
    )
python
def intersection(self, x): """ Newly set a time range that overlaps the input and the current time range. :param DateTimeRange x: Value to compute intersection with the current time range. :Sample Code: .. code:: python from datetimerange import DateTimeRange dtr0 = DateTimeRange("2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") dtr1 = DateTimeRange("2015-03-22T10:05:00+0900", "2015-03-22T10:15:00+0900") dtr0.intersection(dtr1) :Output: .. parsed-literal:: 2015-03-22T10:05:00+0900 - 2015-03-22T10:10:00+0900 """ self.validate_time_inversion() x.validate_time_inversion() if any([x.start_datetime in self, self.start_datetime in x]): start_datetime = max(self.start_datetime, x.start_datetime) end_datetime = min(self.end_datetime, x.end_datetime) else: start_datetime = None end_datetime = None return DateTimeRange( start_datetime=start_datetime, end_datetime=end_datetime, start_time_format=self.start_time_format, end_time_format=self.end_time_format, )
Newly set a time range that overlaps the input and the current time range. :param DateTimeRange x: Value to compute intersection with the current time range. :Sample Code: .. code:: python from datetimerange import DateTimeRange dtr0 = DateTimeRange("2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") dtr1 = DateTimeRange("2015-03-22T10:05:00+0900", "2015-03-22T10:15:00+0900") dtr0.intersection(dtr1) :Output: .. parsed-literal:: 2015-03-22T10:05:00+0900 - 2015-03-22T10:10:00+0900
https://github.com/thombashi/DateTimeRange/blob/542a3b69ec256d28cc5d5469fd68207c1b509c9c/datetimerange/__init__.py#L600-L636
thombashi/DateTimeRange
datetimerange/__init__.py
DateTimeRange.encompass
def encompass(self, x):
    """
    Build a new DateTimeRange spanning both *x* and this range.

    :param DateTimeRange x: Value to compute encompass with the current
        time range.
    :return: New range from the earliest start to the latest end.
    """
    self.validate_time_inversion()
    x.validate_time_inversion()

    earliest_start = min(self.start_datetime, x.start_datetime)
    latest_end = max(self.end_datetime, x.end_datetime)
    return DateTimeRange(
        start_datetime=earliest_start,
        end_datetime=latest_end,
        start_time_format=self.start_time_format,
        end_time_format=self.end_time_format,
    )
python
def encompass(self, x): """ Newly set a time range that encompasses the input and the current time range. :param DateTimeRange x: Value to compute encompass with the current time range. :Sample Code: .. code:: python from datetimerange import DateTimeRange dtr0 = DateTimeRange("2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") dtr1 = DateTimeRange("2015-03-22T10:05:00+0900", "2015-03-22T10:15:00+0900") dtr0.encompass(dtr1) :Output: .. parsed-literal:: 2015-03-22T10:00:00+0900 - 2015-03-22T10:15:00+0900 """ self.validate_time_inversion() x.validate_time_inversion() return DateTimeRange( start_datetime=min(self.start_datetime, x.start_datetime), end_datetime=max(self.end_datetime, x.end_datetime), start_time_format=self.start_time_format, end_time_format=self.end_time_format, )
Newly set a time range that encompasses the input and the current time range. :param DateTimeRange x: Value to compute encompass with the current time range. :Sample Code: .. code:: python from datetimerange import DateTimeRange dtr0 = DateTimeRange("2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") dtr1 = DateTimeRange("2015-03-22T10:05:00+0900", "2015-03-22T10:15:00+0900") dtr0.encompass(dtr1) :Output: .. parsed-literal:: 2015-03-22T10:00:00+0900 - 2015-03-22T10:15:00+0900
https://github.com/thombashi/DateTimeRange/blob/542a3b69ec256d28cc5d5469fd68207c1b509c9c/datetimerange/__init__.py#L638-L667
thombashi/DateTimeRange
datetimerange/__init__.py
DateTimeRange.truncate
def truncate(self, percentage):
    """
    Truncate ``percentage`` / 2 [%] of whole time from first and last time.

    :param float percentage: Percentage of truncate.
    :raises ValueError: If ``percentage`` is negative.
    :raises TypeError/ValueError: Propagated from validate_time_inversion
        when the range is unset or inverted.
    """
    self.validate_time_inversion()

    if percentage < 0:
        raise ValueError("discard_percent must be greater or equal to zero: " + str(percentage))

    if percentage == 0:
        return

    # NOTE(review): int(percentage / 2) truncates fractional percentages
    # (e.g. 5 -> 2%), and `timedelta // 100` drops sub-unit precision —
    # presumably acceptable rounding; confirm intended behavior.
    discard_time = self.timedelta // int(100) * int(percentage / 2)
    # Shrink the range symmetrically from both ends.
    self.__start_datetime += discard_time
    self.__end_datetime -= discard_time
python
def truncate(self, percentage): """ Truncate ``percentage`` / 2 [%] of whole time from first and last time. :param float percentage: Percentage of truncate. :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange( "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") time_range.is_output_elapse = True print(time_range) time_range.truncate(10) print(time_range) :Output: .. parsed-literal:: 2015-03-22T10:00:00+0900 - 2015-03-22T10:10:00+0900 (0:10:00) 2015-03-22T10:00:30+0900 - 2015-03-22T10:09:30+0900 (0:09:00) """ self.validate_time_inversion() if percentage < 0: raise ValueError("discard_percent must be greater or equal to zero: " + str(percentage)) if percentage == 0: return discard_time = self.timedelta // int(100) * int(percentage / 2) self.__start_datetime += discard_time self.__end_datetime -= discard_time
Truncate ``percentage`` / 2 [%] of whole time from first and last time. :param float percentage: Percentage of truncate. :Sample Code: .. code:: python from datetimerange import DateTimeRange time_range = DateTimeRange( "2015-03-22T10:00:00+0900", "2015-03-22T10:10:00+0900") time_range.is_output_elapse = True print(time_range) time_range.truncate(10) print(time_range) :Output: .. parsed-literal:: 2015-03-22T10:00:00+0900 - 2015-03-22T10:10:00+0900 (0:10:00) 2015-03-22T10:00:30+0900 - 2015-03-22T10:09:30+0900 (0:09:00)
https://github.com/thombashi/DateTimeRange/blob/542a3b69ec256d28cc5d5469fd68207c1b509c9c/datetimerange/__init__.py#L669-L703
progrium/skypipe
skypipe/cloud.py
wait_for
def wait_for(text, finish=None, io=None):
    """Display *text* followed by dots until the returned event is set.

    Passing a previously returned event as *finish* stops that earlier
    dot-printer before this one starts.
    """
    if finish:
        finish.set()
        time.sleep(0.1)  # threads, sigh
    io = io or sys.stdout

    finish = threading.Event()
    io.write(text)

    def _tick():
        # Print a dot roughly every second until asked to stop.
        while not finish.is_set():
            io.write('.')
            io.flush()
            finish.wait(timeout=1)
        io.write('\n')

    threading.Thread(target=_tick).start()
    return finish
python
def wait_for(text, finish=None, io=None): """Displays dots until returned event is set""" if finish: finish.set() time.sleep(0.1) # threads, sigh if not io: io = sys.stdout finish = threading.Event() io.write(text) def _wait(): while not finish.is_set(): io.write('.') io.flush() finish.wait(timeout=1) io.write('\n') threading.Thread(target=_wait).start() return finish
Displays dots until returned event is set
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/cloud.py#L37-L53
progrium/skypipe
skypipe/cloud.py
lookup_endpoint
def lookup_endpoint(cli):
    """Looks up the application's ZMQ endpoint from dotcloud.

    Reads the satellite host/port from the app environment and resolves
    the host name to an IP address.
    """
    environ_url = '/applications/{0}/environment'.format(APPNAME)
    environ = cli.user.get(environ_url).item
    zmq_port = environ['DOTCLOUD_SATELLITE_ZMQ_PORT']
    zmq_host = socket.gethostbyname(environ['DOTCLOUD_SATELLITE_ZMQ_HOST'])
    return "tcp://{0}:{1}".format(zmq_host, zmq_port)
python
def lookup_endpoint(cli): """Looks up the application endpoint from dotcloud""" url = '/applications/{0}/environment'.format(APPNAME) environ = cli.user.get(url).item port = environ['DOTCLOUD_SATELLITE_ZMQ_PORT'] host = socket.gethostbyname(environ['DOTCLOUD_SATELLITE_ZMQ_HOST']) return "tcp://{0}:{1}".format(host, port)
Looks up the application endpoint from dotcloud
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/cloud.py#L56-L62
progrium/skypipe
skypipe/cloud.py
setup_dotcloud_account
def setup_dotcloud_account(cli): """Gets user/pass for dotcloud, performs auth, and stores keys""" client = RESTClient(endpoint=cli.client.endpoint) client.authenticator = NullAuth() urlmap = client.get('/auth/discovery').item username = cli.prompt('dotCloud email') password = cli.prompt('Password', noecho=True) credential = {'token_url': urlmap.get('token'), 'key': CLIENT_KEY, 'secret': CLIENT_SECRET} try: token = cli.authorize_client(urlmap.get('token'), credential, username, password) except Exception as e: cli.die('Username and password do not match. Try again.') token['url'] = credential['token_url'] config = GlobalConfig() config.data = {'token': token} config.save() cli.global_config = GlobalConfig() # reload cli.setup_auth() cli.get_keys()
python
def setup_dotcloud_account(cli): """Gets user/pass for dotcloud, performs auth, and stores keys""" client = RESTClient(endpoint=cli.client.endpoint) client.authenticator = NullAuth() urlmap = client.get('/auth/discovery').item username = cli.prompt('dotCloud email') password = cli.prompt('Password', noecho=True) credential = {'token_url': urlmap.get('token'), 'key': CLIENT_KEY, 'secret': CLIENT_SECRET} try: token = cli.authorize_client(urlmap.get('token'), credential, username, password) except Exception as e: cli.die('Username and password do not match. Try again.') token['url'] = credential['token_url'] config = GlobalConfig() config.data = {'token': token} config.save() cli.global_config = GlobalConfig() # reload cli.setup_auth() cli.get_keys()
Gets user/pass for dotcloud, performs auth, and stores keys
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/cloud.py#L65-L84
progrium/skypipe
skypipe/cloud.py
setup
def setup(cli): """Everything to make skypipe ready to use""" if not cli.global_config.loaded: setup_dotcloud_account(cli) discover_satellite(cli) cli.success("Skypipe is ready for action")
python
def setup(cli): """Everything to make skypipe ready to use""" if not cli.global_config.loaded: setup_dotcloud_account(cli) discover_satellite(cli) cli.success("Skypipe is ready for action")
Everything to make skypipe ready to use
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/cloud.py#L86-L91
progrium/skypipe
skypipe/cloud.py
discover_satellite
def discover_satellite(cli, deploy=True, timeout=5): """Looks to make sure a satellite exists, returns endpoint First makes sure we have dotcloud account credentials. Then it looks up the environment for the satellite app. This will contain host and port to construct an endpoint. However, if app doesn't exist, or endpoint does not check out, we call `launch_satellite` to deploy, which calls `discover_satellite` again when finished. Ultimately we return a working endpoint. If deploy is False it will not try to deploy. """ if not cli.global_config.loaded: cli.die("Please setup skypipe by running `skypipe --setup`") try: endpoint = lookup_endpoint(cli) ok = client.check_skypipe_endpoint(endpoint, timeout) if ok: return endpoint else: return launch_satellite(cli) if deploy else None except (RESTAPIError, KeyError): return launch_satellite(cli) if deploy else None
python
def discover_satellite(cli, deploy=True, timeout=5): """Looks to make sure a satellite exists, returns endpoint First makes sure we have dotcloud account credentials. Then it looks up the environment for the satellite app. This will contain host and port to construct an endpoint. However, if app doesn't exist, or endpoint does not check out, we call `launch_satellite` to deploy, which calls `discover_satellite` again when finished. Ultimately we return a working endpoint. If deploy is False it will not try to deploy. """ if not cli.global_config.loaded: cli.die("Please setup skypipe by running `skypipe --setup`") try: endpoint = lookup_endpoint(cli) ok = client.check_skypipe_endpoint(endpoint, timeout) if ok: return endpoint else: return launch_satellite(cli) if deploy else None except (RESTAPIError, KeyError): return launch_satellite(cli) if deploy else None
Looks to make sure a satellite exists, returns endpoint First makes sure we have dotcloud account credentials. Then it looks up the environment for the satellite app. This will contain host and port to construct an endpoint. However, if app doesn't exist, or endpoint does not check out, we call `launch_satellite` to deploy, which calls `discover_satellite` again when finished. Ultimately we return a working endpoint. If deploy is False it will not try to deploy.
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/cloud.py#L94-L116
progrium/skypipe
skypipe/cloud.py
launch_satellite
def launch_satellite(cli): """Deploys a new satellite app over any existing app""" cli.info("Launching skypipe satellite:") finish = wait_for(" Pushing to dotCloud") # destroy any existing satellite destroy_satellite(cli) # create new satellite app url = '/applications' try: cli.user.post(url, { 'name': APPNAME, 'flavor': 'sandbox' }) except RESTAPIError as e: if e.code == 409: cli.die('Application "{0}" already exists.'.format(APPNAME)) else: cli.die('Creating application "{0}" failed: {1}'.format(APPNAME, e)) class args: application = APPNAME #cli._connect(args) # push satellite code protocol = 'rsync' url = '/applications/{0}/push-endpoints{1}'.format(APPNAME, '') endpoint = cli._select_endpoint(cli.user.get(url).items, protocol) class args: path = satellite_path cli.push_with_rsync(args, endpoint) # tell dotcloud to deploy, then wait for it to finish revision = None clean = False url = '/applications/{0}/deployments'.format(APPNAME) response = cli.user.post(url, {'revision': revision, 'clean': clean}) deploy_trace_id = response.trace_id deploy_id = response.item['deploy_id'] original_stdout = sys.stdout finish = wait_for(" Waiting for deployment", finish, original_stdout) try: sys.stdout = StringIO() res = cli._stream_deploy_logs(APPNAME, deploy_id, deploy_trace_id=deploy_trace_id, follow=True) if res != 0: return res except KeyboardInterrupt: cli.error('You\'ve closed your log stream with Ctrl-C, ' \ 'but the deployment is still running in the background.') cli.error('If you aborted because of an error ' \ '(e.g. 
the deployment got stuck), please e-mail\n' \ '[email protected] and mention this trace ID: {0}' .format(deploy_trace_id)) cli.error('If you want to continue following your deployment, ' \ 'try:\n{0}'.format( cli._fmt_deploy_logs_command(deploy_id))) cli.die() except RuntimeError: # workaround for a bug in the current dotcloud client code pass finally: sys.stdout = original_stdout finish = wait_for(" Satellite coming online", finish) endpoint = lookup_endpoint(cli) ok = client.check_skypipe_endpoint(endpoint, 120) finish.set() time.sleep(0.1) # sigh, threads if ok: return endpoint else: cli.die("Satellite failed to come online")
python
def launch_satellite(cli): """Deploys a new satellite app over any existing app""" cli.info("Launching skypipe satellite:") finish = wait_for(" Pushing to dotCloud") # destroy any existing satellite destroy_satellite(cli) # create new satellite app url = '/applications' try: cli.user.post(url, { 'name': APPNAME, 'flavor': 'sandbox' }) except RESTAPIError as e: if e.code == 409: cli.die('Application "{0}" already exists.'.format(APPNAME)) else: cli.die('Creating application "{0}" failed: {1}'.format(APPNAME, e)) class args: application = APPNAME #cli._connect(args) # push satellite code protocol = 'rsync' url = '/applications/{0}/push-endpoints{1}'.format(APPNAME, '') endpoint = cli._select_endpoint(cli.user.get(url).items, protocol) class args: path = satellite_path cli.push_with_rsync(args, endpoint) # tell dotcloud to deploy, then wait for it to finish revision = None clean = False url = '/applications/{0}/deployments'.format(APPNAME) response = cli.user.post(url, {'revision': revision, 'clean': clean}) deploy_trace_id = response.trace_id deploy_id = response.item['deploy_id'] original_stdout = sys.stdout finish = wait_for(" Waiting for deployment", finish, original_stdout) try: sys.stdout = StringIO() res = cli._stream_deploy_logs(APPNAME, deploy_id, deploy_trace_id=deploy_trace_id, follow=True) if res != 0: return res except KeyboardInterrupt: cli.error('You\'ve closed your log stream with Ctrl-C, ' \ 'but the deployment is still running in the background.') cli.error('If you aborted because of an error ' \ '(e.g. 
the deployment got stuck), please e-mail\n' \ '[email protected] and mention this trace ID: {0}' .format(deploy_trace_id)) cli.error('If you want to continue following your deployment, ' \ 'try:\n{0}'.format( cli._fmt_deploy_logs_command(deploy_id))) cli.die() except RuntimeError: # workaround for a bug in the current dotcloud client code pass finally: sys.stdout = original_stdout finish = wait_for(" Satellite coming online", finish) endpoint = lookup_endpoint(cli) ok = client.check_skypipe_endpoint(endpoint, 120) finish.set() time.sleep(0.1) # sigh, threads if ok: return endpoint else: cli.die("Satellite failed to come online")
Deploys a new satellite app over any existing app
https://github.com/progrium/skypipe/blob/6162610a1876282ff1cc8eeca6c8669b8f605482/skypipe/cloud.py#L125-L204
podhmo/alchemyjsonschema
alchemyjsonschema/compat.py
bytes_
def bytes_(s, encoding='utf-8', errors='strict'): """ If ``s`` is an instance of ``text_type``, return ``s.encode(encoding, errors)``, otherwise return ``s``""" if isinstance(s, text_type): # pragma: no cover return s.encode(encoding, errors) return s
python
def bytes_(s, encoding='utf-8', errors='strict'): """ If ``s`` is an instance of ``text_type``, return ``s.encode(encoding, errors)``, otherwise return ``s``""" if isinstance(s, text_type): # pragma: no cover return s.encode(encoding, errors) return s
If ``s`` is an instance of ``text_type``, return ``s.encode(encoding, errors)``, otherwise return ``s``
https://github.com/podhmo/alchemyjsonschema/blob/0c1da7238413613b1fd21aa31563de948808e3c8/alchemyjsonschema/compat.py#L33-L38
opengisch/pum
pum/core/dumper.py
Dumper.pg_backup
def pg_backup(self, pg_dump_exe='pg_dump', exclude_schema=None): """Call the pg_dump command to create a db backup Parameters ---------- pg_dump_exe: str the pg_dump command path exclude_schema: str[] list of schemas to be skipped """ command = [ pg_dump_exe, '-Fc', '-f', self.file, 'service={}'.format(self.pg_service) ] if exclude_schema: command.append(' '.join("--exclude-schema={}".format(schema) for schema in exclude_schema)) subprocess.check_output(command, stderr=subprocess.STDOUT)
python
def pg_backup(self, pg_dump_exe='pg_dump', exclude_schema=None): """Call the pg_dump command to create a db backup Parameters ---------- pg_dump_exe: str the pg_dump command path exclude_schema: str[] list of schemas to be skipped """ command = [ pg_dump_exe, '-Fc', '-f', self.file, 'service={}'.format(self.pg_service) ] if exclude_schema: command.append(' '.join("--exclude-schema={}".format(schema) for schema in exclude_schema)) subprocess.check_output(command, stderr=subprocess.STDOUT)
Call the pg_dump command to create a db backup Parameters ---------- pg_dump_exe: str the pg_dump command path exclude_schema: str[] list of schemas to be skipped
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/dumper.py#L17-L35
opengisch/pum
pum/core/dumper.py
Dumper.pg_restore
def pg_restore(self, pg_restore_exe='pg_restore', exclude_schema=None): """Call the pg_restore command to restore a db backup Parameters ---------- pg_restore_exe: str the pg_restore command path """ command = [ pg_restore_exe, '-d', 'service={}'.format(self.pg_service), '--no-owner' ] if exclude_schema: exclude_schema_available = False try: pg_version = subprocess.check_output(['pg_restore','--version']) pg_version = str(pg_version).replace('\\n', '').replace("'", '').split(' ')[-1] exclude_schema_available = LooseVersion(pg_version) >= LooseVersion("10.0") except subprocess.CalledProcessError as e: print("*** Could not get pg_restore version:\n", e.stderr) if exclude_schema_available: command.append(' '.join("--exclude-schema={}".format(schema) for schema in exclude_schema)) command.append(self.file) try: subprocess.check_output(command) except subprocess.CalledProcessError as e: print("*** pg_restore failed:\n", command, '\n', e.stderr)
python
def pg_restore(self, pg_restore_exe='pg_restore', exclude_schema=None): """Call the pg_restore command to restore a db backup Parameters ---------- pg_restore_exe: str the pg_restore command path """ command = [ pg_restore_exe, '-d', 'service={}'.format(self.pg_service), '--no-owner' ] if exclude_schema: exclude_schema_available = False try: pg_version = subprocess.check_output(['pg_restore','--version']) pg_version = str(pg_version).replace('\\n', '').replace("'", '').split(' ')[-1] exclude_schema_available = LooseVersion(pg_version) >= LooseVersion("10.0") except subprocess.CalledProcessError as e: print("*** Could not get pg_restore version:\n", e.stderr) if exclude_schema_available: command.append(' '.join("--exclude-schema={}".format(schema) for schema in exclude_schema)) command.append(self.file) try: subprocess.check_output(command) except subprocess.CalledProcessError as e: print("*** pg_restore failed:\n", command, '\n', e.stderr)
Call the pg_restore command to restore a db backup Parameters ---------- pg_restore_exe: str the pg_restore command path
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/dumper.py#L37-L67
opengisch/pum
pum/core/upgrader.py
Upgrader.exists_table_upgrades
def exists_table_upgrades(self): """Return if the upgrades table exists Returns ------- bool True if the table exists False if the table don't exists""" query = """ SELECT EXISTS ( SELECT 1 FROM information_schema.tables WHERE table_schema = '{}' AND table_name = '{}' ); """.format(self.upgrades_table[:self.upgrades_table.index('.')], self.upgrades_table[self.upgrades_table.index('.')+1:]) self.cursor.execute(query) return self.cursor.fetchone()[0]
python
def exists_table_upgrades(self): """Return if the upgrades table exists Returns ------- bool True if the table exists False if the table don't exists""" query = """ SELECT EXISTS ( SELECT 1 FROM information_schema.tables WHERE table_schema = '{}' AND table_name = '{}' ); """.format(self.upgrades_table[:self.upgrades_table.index('.')], self.upgrades_table[self.upgrades_table.index('.')+1:]) self.cursor.execute(query) return self.cursor.fetchone()[0]
Return if the upgrades table exists Returns ------- bool True if the table exists False if the table don't exists
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L91-L111
opengisch/pum
pum/core/upgrader.py
Upgrader.__get_delta_files
def __get_delta_files(self): """Search for delta files and return a dict of Delta objects, keyed by directory names.""" files = [(d, f) for d in self.dirs for f in listdir(d) if isfile(join(d, f))] deltas = OrderedDict() for d, f in files: file_ = join(d, f) if not Delta.is_valid_delta_name(file_): continue delta = Delta(file_) if d not in deltas: deltas[d] = [] deltas[d].append(delta) # sort delta objects in each bucket for d in deltas: deltas[d].sort(key=lambda x: (x.get_version(), x.get_priority(), x.get_name())) return deltas
python
def __get_delta_files(self): """Search for delta files and return a dict of Delta objects, keyed by directory names.""" files = [(d, f) for d in self.dirs for f in listdir(d) if isfile(join(d, f))] deltas = OrderedDict() for d, f in files: file_ = join(d, f) if not Delta.is_valid_delta_name(file_): continue delta = Delta(file_) if d not in deltas: deltas[d] = [] deltas[d].append(delta) # sort delta objects in each bucket for d in deltas: deltas[d].sort(key=lambda x: (x.get_version(), x.get_priority(), x.get_name())) return deltas
Search for delta files and return a dict of Delta objects, keyed by directory names.
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L121-L142
opengisch/pum
pum/core/upgrader.py
Upgrader.__run_delta_sql
def __run_delta_sql(self, delta): """Execute the delta sql file on the database""" self.__run_sql_file(delta.get_file()) self.__update_upgrades_table(delta)
python
def __run_delta_sql(self, delta): """Execute the delta sql file on the database""" self.__run_sql_file(delta.get_file()) self.__update_upgrades_table(delta)
Execute the delta sql file on the database
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L144-L148
opengisch/pum
pum/core/upgrader.py
Upgrader.__run_delta_py
def __run_delta_py(self, delta): """Execute the delta py file""" self.__run_py_file(delta.get_file(), delta.get_name()) self.__update_upgrades_table(delta)
python
def __run_delta_py(self, delta): """Execute the delta py file""" self.__run_py_file(delta.get_file(), delta.get_name()) self.__update_upgrades_table(delta)
Execute the delta py file
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L150-L154
opengisch/pum
pum/core/upgrader.py
Upgrader.__run_pre_all
def __run_pre_all(self): """Execute the pre-all.py and pre-all.sql files if they exist""" # if the list of delta dirs is [delta1, delta2] the pre scripts of delta2 are # executed before the pre scripts of delta1 for d in reversed(self.dirs): pre_all_py_path = os.path.join(d, 'pre-all.py') if os.path.isfile(pre_all_py_path): print(' Applying pre-all.py...', end=' ') self.__run_py_file(pre_all_py_path, 'pre-all') print('OK') pre_all_sql_path = os.path.join(d, 'pre-all.sql') if os.path.isfile(pre_all_sql_path): print(' Applying pre-all.sql...', end=' ') self.__run_sql_file(pre_all_sql_path) print('OK')
python
def __run_pre_all(self): """Execute the pre-all.py and pre-all.sql files if they exist""" # if the list of delta dirs is [delta1, delta2] the pre scripts of delta2 are # executed before the pre scripts of delta1 for d in reversed(self.dirs): pre_all_py_path = os.path.join(d, 'pre-all.py') if os.path.isfile(pre_all_py_path): print(' Applying pre-all.py...', end=' ') self.__run_py_file(pre_all_py_path, 'pre-all') print('OK') pre_all_sql_path = os.path.join(d, 'pre-all.sql') if os.path.isfile(pre_all_sql_path): print(' Applying pre-all.sql...', end=' ') self.__run_sql_file(pre_all_sql_path) print('OK')
Execute the pre-all.py and pre-all.sql files if they exist
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L156-L173
opengisch/pum
pum/core/upgrader.py
Upgrader.__run_post_all
def __run_post_all(self): """Execute the post-all.py and post-all.sql files if they exist""" # if the list of delta dirs is [delta1, delta2] the post scripts of delta1 are # executed before the post scripts of delta2 for d in self.dirs: post_all_py_path = os.path.join(d, 'post-all.py') if os.path.isfile(post_all_py_path): print(' Applying post-all.py...', end=' ') self.__run_py_file(post_all_py_path, 'post-all') print('OK') post_all_sql_path = os.path.join(d, 'post-all.sql') if os.path.isfile(post_all_sql_path): print(' Applying post-all.sql...', end=' ') self.__run_sql_file(post_all_sql_path) print('OK')
python
def __run_post_all(self): """Execute the post-all.py and post-all.sql files if they exist""" # if the list of delta dirs is [delta1, delta2] the post scripts of delta1 are # executed before the post scripts of delta2 for d in self.dirs: post_all_py_path = os.path.join(d, 'post-all.py') if os.path.isfile(post_all_py_path): print(' Applying post-all.py...', end=' ') self.__run_py_file(post_all_py_path, 'post-all') print('OK') post_all_sql_path = os.path.join(d, 'post-all.sql') if os.path.isfile(post_all_sql_path): print(' Applying post-all.sql...', end=' ') self.__run_sql_file(post_all_sql_path) print('OK')
Execute the post-all.py and post-all.sql files if they exist
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L175-L192
opengisch/pum
pum/core/upgrader.py
Upgrader.__run_sql_file
def __run_sql_file(self, filepath): """Execute the sql file at the passed path Parameters ---------- filepath: str the path of the file to execute""" with open(filepath, 'r') as delta_file: sql = delta_file.read() if self.variables: self.cursor.execute(sql, self.variables) else: self.cursor.execute(sql) self.connection.commit()
python
def __run_sql_file(self, filepath): """Execute the sql file at the passed path Parameters ---------- filepath: str the path of the file to execute""" with open(filepath, 'r') as delta_file: sql = delta_file.read() if self.variables: self.cursor.execute(sql, self.variables) else: self.cursor.execute(sql) self.connection.commit()
Execute the sql file at the passed path Parameters ---------- filepath: str the path of the file to execute
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L194-L208
opengisch/pum
pum/core/upgrader.py
Upgrader.__run_py_file
def __run_py_file(self, filepath, module_name): """Execute the python file at the passed path Parameters ---------- filepath: str the path of the file to execute module_name: str the name of the python module """ # Import the module spec = importlib.util.spec_from_file_location(module_name, filepath) delta_py = importlib.util.module_from_spec(spec) spec.loader.exec_module(delta_py) # Get the python file's directory path # Note: we add a separator for backward compatibility, as existing DeltaPy subclasses # may assume that delta_dir ends with a separator dir_ = dirname(filepath) + os.sep # Search for subclasses of DeltaPy for name in dir(delta_py): obj = getattr(delta_py, name) if inspect.isclass(obj) and not obj == DeltaPy and issubclass( obj, DeltaPy): delta_py_inst = obj( self.current_db_version(), dir_, self.dirs, self.pg_service, self.upgrades_table, variables=self.variables) delta_py_inst.run()
python
def __run_py_file(self, filepath, module_name): """Execute the python file at the passed path Parameters ---------- filepath: str the path of the file to execute module_name: str the name of the python module """ # Import the module spec = importlib.util.spec_from_file_location(module_name, filepath) delta_py = importlib.util.module_from_spec(spec) spec.loader.exec_module(delta_py) # Get the python file's directory path # Note: we add a separator for backward compatibility, as existing DeltaPy subclasses # may assume that delta_dir ends with a separator dir_ = dirname(filepath) + os.sep # Search for subclasses of DeltaPy for name in dir(delta_py): obj = getattr(delta_py, name) if inspect.isclass(obj) and not obj == DeltaPy and issubclass( obj, DeltaPy): delta_py_inst = obj( self.current_db_version(), dir_, self.dirs, self.pg_service, self.upgrades_table, variables=self.variables) delta_py_inst.run()
Execute the python file at the passed path Parameters ---------- filepath: str the path of the file to execute module_name: str the name of the python module
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L210-L240
opengisch/pum
pum/core/upgrader.py
Upgrader.show_info
def show_info(self): """Print info about found delta files and about already made upgrades""" deltas = self.__get_delta_files() table = [['Version', 'Name', 'Type', 'Status']] for dir_ in deltas: print('delta files in dir: ', dir_) for delta in deltas[dir_]: line = [delta.get_version(), delta.get_name()] if delta.get_type() == DeltaType.PRE_PYTHON: line.append('pre py') elif delta.get_type() == DeltaType.PRE_SQL: line.append('pre sql') elif delta.get_type() == DeltaType.PYTHON: line.append('delta py') elif delta.get_type() == DeltaType.SQL: line.append('delta sql') elif delta.get_type() == DeltaType.POST_PYTHON: line.append('post py') elif delta.get_type() == DeltaType.POST_SQL: line.append('post sql') if self.__is_applied(delta): line.append('Applied') else: line.append('Pending') table.append(line) self.__print_table(table) print('') print('Applied upgrades in database') query = """SELECT version, description, type, installed_by, installed_on, success FROM {} """.format(self.upgrades_table) self.cursor.execute(query) records = self.cursor.fetchall() table = [['Version', 'Name', 'Type', 'Installed by', 'Installed on', 'Status']] for i in records: line = [str(i[0]), str(i[1])] delta_type = i[2] if delta_type == 0: line.append('baseline') elif delta_type == DeltaType.PRE_PYTHON: line.append('pre py') elif delta_type == DeltaType.PRE_SQL: line.append('pre sql') elif delta_type == DeltaType.PYTHON: line.append('delta py') elif delta_type == DeltaType.SQL: line.append('delta sql') elif delta_type == DeltaType.POST_PYTHON: line.append('post py') elif delta_type == DeltaType.POST_SQL: line.append('post sql') line.append(str(i[3])) line.append(str(i[4])) success = str(i[5]) if success == 'True': line.append('Success') else: line.append('Failed') table.append(line) self.__print_table(table)
python
def show_info(self): """Print info about found delta files and about already made upgrades""" deltas = self.__get_delta_files() table = [['Version', 'Name', 'Type', 'Status']] for dir_ in deltas: print('delta files in dir: ', dir_) for delta in deltas[dir_]: line = [delta.get_version(), delta.get_name()] if delta.get_type() == DeltaType.PRE_PYTHON: line.append('pre py') elif delta.get_type() == DeltaType.PRE_SQL: line.append('pre sql') elif delta.get_type() == DeltaType.PYTHON: line.append('delta py') elif delta.get_type() == DeltaType.SQL: line.append('delta sql') elif delta.get_type() == DeltaType.POST_PYTHON: line.append('post py') elif delta.get_type() == DeltaType.POST_SQL: line.append('post sql') if self.__is_applied(delta): line.append('Applied') else: line.append('Pending') table.append(line) self.__print_table(table) print('') print('Applied upgrades in database') query = """SELECT version, description, type, installed_by, installed_on, success FROM {} """.format(self.upgrades_table) self.cursor.execute(query) records = self.cursor.fetchall() table = [['Version', 'Name', 'Type', 'Installed by', 'Installed on', 'Status']] for i in records: line = [str(i[0]), str(i[1])] delta_type = i[2] if delta_type == 0: line.append('baseline') elif delta_type == DeltaType.PRE_PYTHON: line.append('pre py') elif delta_type == DeltaType.PRE_SQL: line.append('pre sql') elif delta_type == DeltaType.PYTHON: line.append('delta py') elif delta_type == DeltaType.SQL: line.append('delta sql') elif delta_type == DeltaType.POST_PYTHON: line.append('post py') elif delta_type == DeltaType.POST_SQL: line.append('post sql') line.append(str(i[3])) line.append(str(i[4])) success = str(i[5]) if success == 'True': line.append('Success') else: line.append('Failed') table.append(line) self.__print_table(table)
Print info about found delta files and about already made upgrades
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L242-L324
opengisch/pum
pum/core/upgrader.py
Upgrader.__print_table
def __print_table(table): """Print a list in tabular format Based on https://stackoverflow.com/a/8356620""" col_width = [max(len(x) for x in col) for col in zip(*table)] print("| " + " | ".join("{:{}}".format(x, col_width[i]) for i, x in enumerate(table[0])) + " |") print("| " + " | ".join("{:{}}".format('-' * col_width[i], col_width[i]) for i, x in enumerate(table[0])) + " |") for line in table[1:]: print("| " + " | ".join("{:{}}".format(x, col_width[i]) for i, x in enumerate(line)) + " |")
python
def __print_table(table): """Print a list in tabular format Based on https://stackoverflow.com/a/8356620""" col_width = [max(len(x) for x in col) for col in zip(*table)] print("| " + " | ".join("{:{}}".format(x, col_width[i]) for i, x in enumerate(table[0])) + " |") print("| " + " | ".join("{:{}}".format('-' * col_width[i], col_width[i]) for i, x in enumerate(table[0])) + " |") for line in table[1:]: print("| " + " | ".join("{:{}}".format(x, col_width[i]) for i, x in enumerate(line)) + " |")
Print a list in tabular format Based on https://stackoverflow.com/a/8356620
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L327-L338
opengisch/pum
pum/core/upgrader.py
Upgrader.__is_applied
def __is_applied(self, delta): """Verifies if delta file is already applied on database Parameters ---------- delta: Delta object The delta object representing the delta file Returns ------- bool True if the delta is already applied on the db False otherwise """ query = """ SELECT id FROM {} WHERE version = '{}' AND checksum = '{}' AND success = 'TRUE' """.format( self.upgrades_table, delta.get_version(), delta.get_checksum()) self.cursor.execute(query) if not self.cursor.fetchone(): return False else: return True
python
def __is_applied(self, delta): """Verifies if delta file is already applied on database Parameters ---------- delta: Delta object The delta object representing the delta file Returns ------- bool True if the delta is already applied on the db False otherwise """ query = """ SELECT id FROM {} WHERE version = '{}' AND checksum = '{}' AND success = 'TRUE' """.format( self.upgrades_table, delta.get_version(), delta.get_checksum()) self.cursor.execute(query) if not self.cursor.fetchone(): return False else: return True
Verifies if delta file is already applied on database Parameters ---------- delta: Delta object The delta object representing the delta file Returns ------- bool True if the delta is already applied on the db False otherwise
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L340-L367
opengisch/pum
pum/core/upgrader.py
Upgrader.__update_upgrades_table
def __update_upgrades_table(self, delta): """Add a new record into the upgrades information table about the applied delta Parameters ---------- delta: Delta the applied delta file""" query = """ INSERT INTO {} ( --id, version, description, type, script, checksum, installed_by, --installed_on, execution_time, success ) VALUES( '{}', '{}', {}, '{}', '{}', '{}', 1, TRUE ) """.format( self.upgrades_table, delta.get_version(), delta.get_name(), delta.get_type(), delta.get_file(), delta.get_checksum(), self.__get_dbuser()) self.cursor.execute(query) self.connection.commit()
python
def __update_upgrades_table(self, delta): """Add a new record into the upgrades information table about the applied delta Parameters ---------- delta: Delta the applied delta file""" query = """ INSERT INTO {} ( --id, version, description, type, script, checksum, installed_by, --installed_on, execution_time, success ) VALUES( '{}', '{}', {}, '{}', '{}', '{}', 1, TRUE ) """.format( self.upgrades_table, delta.get_version(), delta.get_name(), delta.get_type(), delta.get_file(), delta.get_checksum(), self.__get_dbuser()) self.cursor.execute(query) self.connection.commit()
Add a new record into the upgrades information table about the applied delta Parameters ---------- delta: Delta the applied delta file
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L369-L405
opengisch/pum
pum/core/upgrader.py
Upgrader.create_upgrades_table
def create_upgrades_table(self): """Create the upgrades information table""" query = """CREATE TABLE IF NOT EXISTS {} ( id serial NOT NULL, version character varying(50), description character varying(200) NOT NULL, type integer NOT NULL, script character varying(1000) NOT NULL, checksum character varying(32) NOT NULL, installed_by character varying(100) NOT NULL, installed_on timestamp without time zone NOT NULL DEFAULT now(), execution_time integer NOT NULL, success boolean NOT NULL, PRIMARY KEY (id) ) """.format(self.upgrades_table) self.cursor.execute(query) self.connection.commit()
python
def create_upgrades_table(self): """Create the upgrades information table""" query = """CREATE TABLE IF NOT EXISTS {} ( id serial NOT NULL, version character varying(50), description character varying(200) NOT NULL, type integer NOT NULL, script character varying(1000) NOT NULL, checksum character varying(32) NOT NULL, installed_by character varying(100) NOT NULL, installed_on timestamp without time zone NOT NULL DEFAULT now(), execution_time integer NOT NULL, success boolean NOT NULL, PRIMARY KEY (id) ) """.format(self.upgrades_table) self.cursor.execute(query) self.connection.commit()
Create the upgrades information table
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L407-L427
opengisch/pum
pum/core/upgrader.py
Upgrader.set_baseline
def set_baseline(self, version): """Set the baseline into the creation information table version: str The version of the current database to set in the information table. The baseline must be in the format x.x.x where x are numbers. """ pattern = re.compile(r"^\d+\.\d+\.\d+$") if not re.match(pattern, version): raise ValueError('Wrong version format') query = """ INSERT INTO {} ( version, description, type, script, checksum, installed_by, execution_time, success ) VALUES( '{}', '{}', {}, '{}', '{}', '{}', 1, TRUE ) """.format(self.upgrades_table, version, 'baseline', 0, '', '', self.__get_dbuser()) self.cursor.execute(query) self.connection.commit()
python
def set_baseline(self, version): """Set the baseline into the creation information table version: str The version of the current database to set in the information table. The baseline must be in the format x.x.x where x are numbers. """ pattern = re.compile(r"^\d+\.\d+\.\d+$") if not re.match(pattern, version): raise ValueError('Wrong version format') query = """ INSERT INTO {} ( version, description, type, script, checksum, installed_by, execution_time, success ) VALUES( '{}', '{}', {}, '{}', '{}', '{}', 1, TRUE ) """.format(self.upgrades_table, version, 'baseline', 0, '', '', self.__get_dbuser()) self.cursor.execute(query) self.connection.commit()
Set the baseline into the creation information table version: str The version of the current database to set in the information table. The baseline must be in the format x.x.x where x are numbers.
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L429-L462
opengisch/pum
pum/core/upgrader.py
Upgrader.current_db_version
def current_db_version(self): """Read the upgrades information table and return the current db version Returns ------- str the current db version """ query = """ SELECT version from {} WHERE success = TRUE ORDER BY version DESC """.format(self.upgrades_table) self.cursor.execute(query) return self.cursor.fetchone()[0]
python
def current_db_version(self): """Read the upgrades information table and return the current db version Returns ------- str the current db version """ query = """ SELECT version from {} WHERE success = TRUE ORDER BY version DESC """.format(self.upgrades_table) self.cursor.execute(query) return self.cursor.fetchone()[0]
Read the upgrades information table and return the current db version Returns ------- str the current db version
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L464-L480
opengisch/pum
pum/core/upgrader.py
Delta.is_valid_delta_name
def is_valid_delta_name(file): """Return if a file has a valid name A delta file name can be: - pre-all.py - pre-all.sql - delta_x.x.x_ddmmyyyy.pre.py - delta_x.x.x_ddmmyyyy.pre.sql - delta_x.x.x_ddmmyyyy.py - delta_x.x.x_ddmmyyyy.sql - delta_x.x.x_ddmmyyyy.post.py - delta_x.x.x_ddmmyyyy.post.sql - post-all.py - post-all.sql where x.x.x is the version number and _ddmmyyyy is an optional description, usually representing the date of the delta file """ filename = basename(file) pattern = re.compile(Delta.FILENAME_PATTERN) if re.match(pattern, filename): return True return False
python
def is_valid_delta_name(file): """Return if a file has a valid name A delta file name can be: - pre-all.py - pre-all.sql - delta_x.x.x_ddmmyyyy.pre.py - delta_x.x.x_ddmmyyyy.pre.sql - delta_x.x.x_ddmmyyyy.py - delta_x.x.x_ddmmyyyy.sql - delta_x.x.x_ddmmyyyy.post.py - delta_x.x.x_ddmmyyyy.post.sql - post-all.py - post-all.sql where x.x.x is the version number and _ddmmyyyy is an optional description, usually representing the date of the delta file """ filename = basename(file) pattern = re.compile(Delta.FILENAME_PATTERN) if re.match(pattern, filename): return True return False
Return if a file has a valid name A delta file name can be: - pre-all.py - pre-all.sql - delta_x.x.x_ddmmyyyy.pre.py - delta_x.x.x_ddmmyyyy.pre.sql - delta_x.x.x_ddmmyyyy.py - delta_x.x.x_ddmmyyyy.sql - delta_x.x.x_ddmmyyyy.post.py - delta_x.x.x_ddmmyyyy.post.sql - post-all.py - post-all.sql where x.x.x is the version number and _ddmmyyyy is an optional description, usually representing the date of the delta file
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L505-L527
opengisch/pum
pum/core/upgrader.py
Delta.get_checksum
def get_checksum(self): """Return the md5 checksum of the delta file.""" with open(self.file, 'rb') as f: cs = md5(f.read()).hexdigest() return cs
python
def get_checksum(self): """Return the md5 checksum of the delta file.""" with open(self.file, 'rb') as f: cs = md5(f.read()).hexdigest() return cs
Return the md5 checksum of the delta file.
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L546-L550
opengisch/pum
pum/core/upgrader.py
Delta.get_type
def get_type(self): """Return the type of the delta file. Returns ------- type: int """ ext = self.match.group(5) if ext == 'pre.py': return DeltaType.PRE_PYTHON elif ext == 'pre.sql': return DeltaType.PRE_SQL elif ext == 'py': return DeltaType.PYTHON elif ext == 'sql': return DeltaType.SQL elif ext == 'post.py': return DeltaType.POST_PYTHON elif ext == 'post.sql': return DeltaType.POST_SQL
python
def get_type(self): """Return the type of the delta file. Returns ------- type: int """ ext = self.match.group(5) if ext == 'pre.py': return DeltaType.PRE_PYTHON elif ext == 'pre.sql': return DeltaType.PRE_SQL elif ext == 'py': return DeltaType.PYTHON elif ext == 'sql': return DeltaType.SQL elif ext == 'post.py': return DeltaType.POST_PYTHON elif ext == 'post.sql': return DeltaType.POST_SQL
Return the type of the delta file. Returns ------- type: int
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L552-L573
opengisch/pum
pum/core/upgrader.py
Delta.get_priority
def get_priority(self) -> int: """ Rerturns the priority of the file from 1 (pre) to 3 (post) :return: the priority """ dtype = self.get_type() if dtype & DeltaType.PRE: return 1 elif dtype & DeltaType.POST: return 3 else: return 2
python
def get_priority(self) -> int: """ Rerturns the priority of the file from 1 (pre) to 3 (post) :return: the priority """ dtype = self.get_type() if dtype & DeltaType.PRE: return 1 elif dtype & DeltaType.POST: return 3 else: return 2
Rerturns the priority of the file from 1 (pre) to 3 (post) :return: the priority
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/upgrader.py#L575-L586
opengisch/pum
pum/core/deltapy.py
DeltaPy.variable
def variable(self, name: str, default_value=None): """ Safely returns the value of the variable given in PUM Parameters ---------- name the name of the variable default_value the default value for the variable if it does not exist """ return self.__variables.get(name, default_value)
python
def variable(self, name: str, default_value=None): """ Safely returns the value of the variable given in PUM Parameters ---------- name the name of the variable default_value the default value for the variable if it does not exist """ return self.__variables.get(name, default_value)
Safely returns the value of the variable given in PUM Parameters ---------- name the name of the variable default_value the default value for the variable if it does not exist
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/deltapy.py#L64-L75
opengisch/pum
pum/core/checker.py
Checker.run_checks
def run_checks(self): """Run all the checks functions. Returns ------- bool True if all the checks are true False otherwise dict Dictionary of lists of differences """ result = True differences_dict = {} if 'tables' not in self.ignore_list: tmp_result, differences_dict['tables'] = self.check_tables() result = False if not tmp_result else result if 'columns' not in self.ignore_list: tmp_result, differences_dict['columns'] = self.check_columns( 'views' not in self.ignore_list) result = False if not tmp_result else result if 'constraints' not in self.ignore_list: tmp_result, differences_dict['constraints'] = \ self.check_constraints() result = False if not tmp_result else result if 'views' not in self.ignore_list: tmp_result, differences_dict['views'] = self.check_views() result = False if not tmp_result else result if 'sequences' not in self.ignore_list: tmp_result, differences_dict['sequences'] = self.check_sequences() result = False if not tmp_result else result if 'indexes' not in self.ignore_list: tmp_result, differences_dict['indexes'] = self.check_indexes() result = False if not tmp_result else result if 'triggers' not in self.ignore_list: tmp_result, differences_dict['triggers'] = self.check_triggers() result = False if not tmp_result else result if 'functions' not in self.ignore_list: tmp_result, differences_dict['functions'] = self.check_functions() result = False if not tmp_result else result if 'rules' not in self.ignore_list: tmp_result, differences_dict['rules'] = self.check_rules() result = False if not tmp_result else result if self.verbose_level == 0: differences_dict = None return result, differences_dict
python
def run_checks(self): """Run all the checks functions. Returns ------- bool True if all the checks are true False otherwise dict Dictionary of lists of differences """ result = True differences_dict = {} if 'tables' not in self.ignore_list: tmp_result, differences_dict['tables'] = self.check_tables() result = False if not tmp_result else result if 'columns' not in self.ignore_list: tmp_result, differences_dict['columns'] = self.check_columns( 'views' not in self.ignore_list) result = False if not tmp_result else result if 'constraints' not in self.ignore_list: tmp_result, differences_dict['constraints'] = \ self.check_constraints() result = False if not tmp_result else result if 'views' not in self.ignore_list: tmp_result, differences_dict['views'] = self.check_views() result = False if not tmp_result else result if 'sequences' not in self.ignore_list: tmp_result, differences_dict['sequences'] = self.check_sequences() result = False if not tmp_result else result if 'indexes' not in self.ignore_list: tmp_result, differences_dict['indexes'] = self.check_indexes() result = False if not tmp_result else result if 'triggers' not in self.ignore_list: tmp_result, differences_dict['triggers'] = self.check_triggers() result = False if not tmp_result else result if 'functions' not in self.ignore_list: tmp_result, differences_dict['functions'] = self.check_functions() result = False if not tmp_result else result if 'rules' not in self.ignore_list: tmp_result, differences_dict['rules'] = self.check_rules() result = False if not tmp_result else result if self.verbose_level == 0: differences_dict = None return result, differences_dict
Run all the checks functions. Returns ------- bool True if all the checks are true False otherwise dict Dictionary of lists of differences
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/checker.py#L51-L97
opengisch/pum
pum/core/checker.py
Checker.check_columns
def check_columns(self, check_views=True): """Check if the columns in all tables are equals. Parameters ---------- check_views: bool if True, check the columns of all the tables and views, if False check only the columns of the tables Returns ------- bool True if the columns are the same False otherwise list A list with the differences """ if check_views: query = """WITH table_list AS ( SELECT table_schema, table_name FROM information_schema.tables WHERE table_schema NOT IN {} AND table_schema NOT LIKE 'pg\_%' ORDER BY table_schema,table_name ) SELECT isc.table_schema, isc.table_name, column_name, column_default, is_nullable, data_type, character_maximum_length::text, numeric_precision::text, numeric_precision_radix::text, datetime_precision::text FROM information_schema.columns isc, table_list tl WHERE isc.table_schema = tl.table_schema AND isc.table_name = tl.table_name ORDER BY isc.table_schema, isc.table_name, column_name """.format(self.exclude_schema) else: query = """WITH table_list AS ( SELECT table_schema, table_name FROM information_schema.tables WHERE table_schema NOT IN {} AND table_schema NOT LIKE 'pg\_%' AND table_type NOT LIKE 'VIEW' ORDER BY table_schema,table_name ) SELECT isc.table_schema, isc.table_name, column_name, column_default, is_nullable, data_type, character_maximum_length::text, numeric_precision::text, numeric_precision_radix::text, datetime_precision::text FROM information_schema.columns isc, table_list tl WHERE isc.table_schema = tl.table_schema AND isc.table_name = tl.table_name ORDER BY isc.table_schema, isc.table_name, column_name """.format(self.exclude_schema) return self.__check_equals(query)
python
def check_columns(self, check_views=True): """Check if the columns in all tables are equals. Parameters ---------- check_views: bool if True, check the columns of all the tables and views, if False check only the columns of the tables Returns ------- bool True if the columns are the same False otherwise list A list with the differences """ if check_views: query = """WITH table_list AS ( SELECT table_schema, table_name FROM information_schema.tables WHERE table_schema NOT IN {} AND table_schema NOT LIKE 'pg\_%' ORDER BY table_schema,table_name ) SELECT isc.table_schema, isc.table_name, column_name, column_default, is_nullable, data_type, character_maximum_length::text, numeric_precision::text, numeric_precision_radix::text, datetime_precision::text FROM information_schema.columns isc, table_list tl WHERE isc.table_schema = tl.table_schema AND isc.table_name = tl.table_name ORDER BY isc.table_schema, isc.table_name, column_name """.format(self.exclude_schema) else: query = """WITH table_list AS ( SELECT table_schema, table_name FROM information_schema.tables WHERE table_schema NOT IN {} AND table_schema NOT LIKE 'pg\_%' AND table_type NOT LIKE 'VIEW' ORDER BY table_schema,table_name ) SELECT isc.table_schema, isc.table_name, column_name, column_default, is_nullable, data_type, character_maximum_length::text, numeric_precision::text, numeric_precision_radix::text, datetime_precision::text FROM information_schema.columns isc, table_list tl WHERE isc.table_schema = tl.table_schema AND isc.table_name = tl.table_name ORDER BY isc.table_schema, isc.table_name, column_name """.format(self.exclude_schema) return self.__check_equals(query)
Check if the columns in all tables are equals. Parameters ---------- check_views: bool if True, check the columns of all the tables and views, if False check only the columns of the tables Returns ------- bool True if the columns are the same False otherwise list A list with the differences
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/checker.py#L120-L176
opengisch/pum
pum/core/checker.py
Checker.check_rules
def check_rules(self): """Check if the rules are equals. Returns ------- bool True if the rules are the same False otherwise list A list with the differences """ query = """ select n.nspname as rule_schema, c.relname as rule_table, case r.ev_type when '1' then 'SELECT' when '2' then 'UPDATE' when '3' then 'INSERT' when '4' then 'DELETE' else 'UNKNOWN' end as rule_event from pg_rewrite r join pg_class c on r.ev_class = c.oid left join pg_namespace n on n.oid = c.relnamespace left join pg_description d on r.oid = d.objoid WHERE n.nspname NOT IN {excl} AND r.rulename != '_RETURN' AND n.nspname NOT LIKE 'pg\_%' ORDER BY n.nspname, c.relname, rule_event """.format(excl=self.exclude_schema) return self.__check_equals(query)
python
def check_rules(self): """Check if the rules are equals. Returns ------- bool True if the rules are the same False otherwise list A list with the differences """ query = """ select n.nspname as rule_schema, c.relname as rule_table, case r.ev_type when '1' then 'SELECT' when '2' then 'UPDATE' when '3' then 'INSERT' when '4' then 'DELETE' else 'UNKNOWN' end as rule_event from pg_rewrite r join pg_class c on r.ev_class = c.oid left join pg_namespace n on n.oid = c.relnamespace left join pg_description d on r.oid = d.objoid WHERE n.nspname NOT IN {excl} AND r.rulename != '_RETURN' AND n.nspname NOT LIKE 'pg\_%' ORDER BY n.nspname, c.relname, rule_event """.format(excl=self.exclude_schema) return self.__check_equals(query)
Check if the rules are equals. Returns ------- bool True if the rules are the same False otherwise list A list with the differences
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/checker.py#L342-L373
opengisch/pum
pum/core/checker.py
Checker.__check_equals
def __check_equals(self, query): """Check if the query results on the two databases are equals. Returns ------- bool True if the results are the same False otherwise list A list with the differences """ self.cur1.execute(query) records1 = self.cur1.fetchall() self.cur2.execute(query) records2 = self.cur2.fetchall() result = True differences = [] d = difflib.Differ() records1 = [str(x) for x in records1] records2 = [str(x) for x in records2] for line in d.compare(records1, records2): if line[0] in ('-', '+'): result = False if self.verbose_level == 1: differences.append(line[0:79]) elif self.verbose_level == 2: differences.append(line) return result, differences
python
def __check_equals(self, query): """Check if the query results on the two databases are equals. Returns ------- bool True if the results are the same False otherwise list A list with the differences """ self.cur1.execute(query) records1 = self.cur1.fetchall() self.cur2.execute(query) records2 = self.cur2.fetchall() result = True differences = [] d = difflib.Differ() records1 = [str(x) for x in records1] records2 = [str(x) for x in records2] for line in d.compare(records1, records2): if line[0] in ('-', '+'): result = False if self.verbose_level == 1: differences.append(line[0:79]) elif self.verbose_level == 2: differences.append(line) return result, differences
Check if the query results on the two databases are equals. Returns ------- bool True if the results are the same False otherwise list A list with the differences
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/core/checker.py#L375-L407
opengisch/pum
pum/utils/utils.py
ask_for_confirmation
def ask_for_confirmation(prompt=None, resp=False): """Prompt for a yes or no response from the user. Parameters ---------- prompt: basestring The question to be prompted to the user. resp: bool The default value assumed by the caller when user simply types ENTER. Returns ------- bool True if the user response is 'y' or 'Y' False if the user response is 'n' or 'N' """ global input if prompt is None: prompt = 'Confirm' if resp: prompt = '%s [%s]|%s: ' % (prompt, 'y', 'n') else: prompt = '%s [%s]|%s: ' % (prompt, 'n', 'y') while True: # Fix for Python2. In python3 raw_input() is now input() try: input = raw_input except NameError: pass ans = input(prompt) if not ans: return resp if ans not in ['y', 'Y', 'n', 'N']: print('please enter y or n.') continue if ans == 'y' or ans == 'Y': return True if ans == 'n' or ans == 'N': return False
python
def ask_for_confirmation(prompt=None, resp=False): """Prompt for a yes or no response from the user. Parameters ---------- prompt: basestring The question to be prompted to the user. resp: bool The default value assumed by the caller when user simply types ENTER. Returns ------- bool True if the user response is 'y' or 'Y' False if the user response is 'n' or 'N' """ global input if prompt is None: prompt = 'Confirm' if resp: prompt = '%s [%s]|%s: ' % (prompt, 'y', 'n') else: prompt = '%s [%s]|%s: ' % (prompt, 'n', 'y') while True: # Fix for Python2. In python3 raw_input() is now input() try: input = raw_input except NameError: pass ans = input(prompt) if not ans: return resp if ans not in ['y', 'Y', 'n', 'N']: print('please enter y or n.') continue if ans == 'y' or ans == 'Y': return True if ans == 'n' or ans == 'N': return False
Prompt for a yes or no response from the user. Parameters ---------- prompt: basestring The question to be prompted to the user. resp: bool The default value assumed by the caller when user simply types ENTER. Returns ------- bool True if the user response is 'y' or 'Y' False if the user response is 'n' or 'N'
https://github.com/opengisch/pum/blob/eaf6af92d723ace60b9e982d7f69b98e00606959/pum/utils/utils.py#L4-L45
Jaymon/endpoints
endpoints/decorators/auth.py
AuthDecorator.handle_target
def handle_target(self, request, controller_args, controller_kwargs): """Only here to set self.request and get rid of it after this will set self.request so the target method can access request using self.request, just like in the controller. """ self.request = request super(AuthDecorator, self).handle_target(request, controller_args, controller_kwargs) del self.request
python
def handle_target(self, request, controller_args, controller_kwargs): """Only here to set self.request and get rid of it after this will set self.request so the target method can access request using self.request, just like in the controller. """ self.request = request super(AuthDecorator, self).handle_target(request, controller_args, controller_kwargs) del self.request
Only here to set self.request and get rid of it after this will set self.request so the target method can access request using self.request, just like in the controller.
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/decorators/auth.py#L190-L198
Jaymon/endpoints
endpoints/client.py
HTTPClient.get
def get(self, uri, query=None, **kwargs): """make a GET request""" return self.fetch('get', uri, query, **kwargs)
python
def get(self, uri, query=None, **kwargs): """make a GET request""" return self.fetch('get', uri, query, **kwargs)
make a GET request
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/client.py#L41-L43
Jaymon/endpoints
endpoints/client.py
HTTPClient.post
def post(self, uri, body=None, **kwargs): """make a POST request""" return self.fetch('post', uri, kwargs.pop("query", {}), body, **kwargs)
python
def post(self, uri, body=None, **kwargs): """make a POST request""" return self.fetch('post', uri, kwargs.pop("query", {}), body, **kwargs)
make a POST request
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/client.py#L45-L47
Jaymon/endpoints
endpoints/client.py
HTTPClient.post_file
def post_file(self, uri, body, files, **kwargs): """POST a file""" # requests doesn't actually need us to open the files but we do anyway because # if we don't then the filename isn't preserved, so we assume each string # value is a filepath for key in files.keys(): if isinstance(files[key], basestring): files[key] = open(files[key], 'rb') kwargs["files"] = files # we ignore content type for posting files since it requires very specific things ct = self.headers.pop("content-type", None) ret = self.fetch('post', uri, {}, body, **kwargs) if ct: self.headers["content-type"] = ct # close all the files for fp in files.values(): fp.close() return ret
python
def post_file(self, uri, body, files, **kwargs): """POST a file""" # requests doesn't actually need us to open the files but we do anyway because # if we don't then the filename isn't preserved, so we assume each string # value is a filepath for key in files.keys(): if isinstance(files[key], basestring): files[key] = open(files[key], 'rb') kwargs["files"] = files # we ignore content type for posting files since it requires very specific things ct = self.headers.pop("content-type", None) ret = self.fetch('post', uri, {}, body, **kwargs) if ct: self.headers["content-type"] = ct # close all the files for fp in files.values(): fp.close() return ret
POST a file
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/client.py#L49-L68
Jaymon/endpoints
endpoints/client.py
HTTPClient.delete
def delete(self, uri, query=None, **kwargs): """make a DELETE request""" return self.fetch('delete', uri, query, **kwargs)
python
def delete(self, uri, query=None, **kwargs): """make a DELETE request""" return self.fetch('delete', uri, query, **kwargs)
make a DELETE request
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/client.py#L70-L72
Jaymon/endpoints
endpoints/client.py
HTTPClient.fetch
def fetch(self, method, uri, query=None, body=None, **kwargs): """ wrapper method that all the top level methods (get, post, etc.) use to actually make the request """ if not query: query = {} fetch_url = self.get_fetch_url(uri, query) args = [fetch_url] kwargs.setdefault("timeout", self.timeout) kwargs["headers"] = self.get_fetch_headers(method, kwargs.get("headers", {})) if body: if self.is_json(kwargs["headers"]): kwargs['json'] = self.get_fetch_body(body) else: kwargs['data'] = self.get_fetch_body(body) res = self.get_fetch_request(method, *args, **kwargs) #res = requests.request(method, *args, **kwargs) res = self.get_fetch_response(res) self.response = res return res
python
def fetch(self, method, uri, query=None, body=None, **kwargs): """ wrapper method that all the top level methods (get, post, etc.) use to actually make the request """ if not query: query = {} fetch_url = self.get_fetch_url(uri, query) args = [fetch_url] kwargs.setdefault("timeout", self.timeout) kwargs["headers"] = self.get_fetch_headers(method, kwargs.get("headers", {})) if body: if self.is_json(kwargs["headers"]): kwargs['json'] = self.get_fetch_body(body) else: kwargs['data'] = self.get_fetch_body(body) res = self.get_fetch_request(method, *args, **kwargs) #res = requests.request(method, *args, **kwargs) res = self.get_fetch_response(res) self.response = res return res
wrapper method that all the top level methods (get, post, etc.) use to actually make the request
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/client.py#L74-L97
Jaymon/endpoints
endpoints/client.py
HTTPClient.get_fetch_headers
def get_fetch_headers(self, method, headers): """merge class headers with passed in headers :param method: string, (eg, GET or POST), this is passed in so you can customize headers based on the method that you are calling :param headers: dict, all the headers passed into the fetch method :returns: passed in headers merged with global class headers """ all_headers = self.headers.copy() if headers: all_headers.update(headers) return Headers(all_headers)
python
def get_fetch_headers(self, method, headers): """merge class headers with passed in headers :param method: string, (eg, GET or POST), this is passed in so you can customize headers based on the method that you are calling :param headers: dict, all the headers passed into the fetch method :returns: passed in headers merged with global class headers """ all_headers = self.headers.copy() if headers: all_headers.update(headers) return Headers(all_headers)
merge class headers with passed in headers :param method: string, (eg, GET or POST), this is passed in so you can customize headers based on the method that you are calling :param headers: dict, all the headers passed into the fetch method :returns: passed in headers merged with global class headers
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/client.py#L142-L153
Jaymon/endpoints
endpoints/client.py
HTTPClient.get_fetch_request
def get_fetch_request(self, method, fetch_url, *args, **kwargs): """This is handy if you want to modify the request right before passing it to requests, or you want to do something extra special customized :param method: string, the http method (eg, GET, POST) :param fetch_url: string, the full url with query params :param *args: any other positional arguments :param **kwargs: any keyword arguments to pass to requests :returns: a requests.Response compatible object instance """ return requests.request(method, fetch_url, *args, **kwargs)
python
def get_fetch_request(self, method, fetch_url, *args, **kwargs): """This is handy if you want to modify the request right before passing it to requests, or you want to do something extra special customized :param method: string, the http method (eg, GET, POST) :param fetch_url: string, the full url with query params :param *args: any other positional arguments :param **kwargs: any keyword arguments to pass to requests :returns: a requests.Response compatible object instance """ return requests.request(method, fetch_url, *args, **kwargs)
This is handy if you want to modify the request right before passing it to requests, or you want to do something extra special customized :param method: string, the http method (eg, GET, POST) :param fetch_url: string, the full url with query params :param *args: any other positional arguments :param **kwargs: any keyword arguments to pass to requests :returns: a requests.Response compatible object instance
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/client.py#L158-L168
Jaymon/endpoints
endpoints/client.py
HTTPClient.get_fetch_response
def get_fetch_response(self, res): """the goal of this method is to make the requests object more endpoints like res -- requests Response -- the native requests response instance, we manipulate it a bit to make it look a bit more like the internal endpoints.Response object """ res.code = res.status_code res.headers = Headers(res.headers) res._body = None res.body = '' body = res.content if body: if self.is_json(res.headers): res._body = res.json() else: res._body = body res.body = String(body, res.encoding) return res
python
def get_fetch_response(self, res): """the goal of this method is to make the requests object more endpoints like res -- requests Response -- the native requests response instance, we manipulate it a bit to make it look a bit more like the internal endpoints.Response object """ res.code = res.status_code res.headers = Headers(res.headers) res._body = None res.body = '' body = res.content if body: if self.is_json(res.headers): res._body = res.json() else: res._body = body res.body = String(body, res.encoding) return res
the goal of this method is to make the requests object more endpoints like res -- requests Response -- the native requests response instance, we manipulate it a bit to make it look a bit more like the internal endpoints.Response object
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/client.py#L170-L189
Jaymon/endpoints
endpoints/client.py
HTTPClient.is_json
def is_json(self, headers): """return true if content_type is a json content type""" ret = False ct = headers.get("content-type", "").lower() if ct: ret = ct.lower().rfind("json") >= 0 return ret
python
def is_json(self, headers): """return true if content_type is a json content type""" ret = False ct = headers.get("content-type", "").lower() if ct: ret = ct.lower().rfind("json") >= 0 return ret
return true if content_type is a json content type
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/client.py#L191-L197
Jaymon/endpoints
endpoints/reflection.py
ReflectMethod.desc
def desc(self): """return the description of this endpoint""" doc = None def visit_FunctionDef(node): """ https://docs.python.org/2/library/ast.html#ast.NodeVisitor.visit """ if node.name != self.method_name: return doc = ast.get_docstring(node) raise StopIteration(doc if doc else "") target = self.controller.controller_class try: node_iter = ast.NodeVisitor() node_iter.visit_FunctionDef = visit_FunctionDef node_iter.visit(ast.parse(inspect.getsource(target))) except StopIteration as e: doc = str(e) if not doc: doc = "" return doc
python
def desc(self): """return the description of this endpoint""" doc = None def visit_FunctionDef(node): """ https://docs.python.org/2/library/ast.html#ast.NodeVisitor.visit """ if node.name != self.method_name: return doc = ast.get_docstring(node) raise StopIteration(doc if doc else "") target = self.controller.controller_class try: node_iter = ast.NodeVisitor() node_iter.visit_FunctionDef = visit_FunctionDef node_iter.visit(ast.parse(inspect.getsource(target))) except StopIteration as e: doc = str(e) if not doc: doc = "" return doc
return the description of this endpoint
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/reflection.py#L83-L104
Jaymon/endpoints
endpoints/reflection.py
ReflectMethod.params
def params(self): """return information about the params that the given http option takes""" ret = {} for rd in self.decorators: args = rd.args kwargs = rd.kwargs if param in rd: is_required = kwargs.get('required', 'default' not in kwargs) ret[args[0]] = {'required': is_required, 'other_names': args[1:], 'options': kwargs} return ret
python
def params(self): """return information about the params that the given http option takes""" ret = {} for rd in self.decorators: args = rd.args kwargs = rd.kwargs if param in rd: is_required = kwargs.get('required', 'default' not in kwargs) ret[args[0]] = {'required': is_required, 'other_names': args[1:], 'options': kwargs} return ret
return information about the params that the given http option takes
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/reflection.py#L107-L117
Jaymon/endpoints
endpoints/reflection.py
ReflectController.decorators
def decorators(self): """Get all the decorators of all the option methods in the class http://stackoverflow.com/questions/5910703/ specifically, I used this answer http://stackoverflow.com/a/9580006 """ res = collections.defaultdict(list) mmap = {} def get_val(na, default=None): ret = None if isinstance(na, ast.Num): repr_n = repr(na.n) val = na.n vtype = float if '.' in repr_n else int ret = vtype(val) elif isinstance(na, ast.Str): ret = str(na.s) elif isinstance(na, ast.Name): # http://stackoverflow.com/questions/12700893/ ret = getattr(builtins, na.id, None) if not ret: ret = na.id if ret == 'True': ret = True elif ret == 'False': ret = False elif isinstance(na, ast.Dict): if na.keys: ret = {get_val(na_[0]): get_val(na_[1]) for na_ in zip(na.keys, na.values)} else: ret = {} elif isinstance(na, (ast.List, ast.Tuple)): if na.elts: ret = [get_val(na_) for na_ in na.elts] else: ret = [] if isinstance(na, ast.Tuple): ret = tuple(ret) else: ret = default return ret def is_super(childnode, parentnode): """returns true if child node has a super() call to parent node""" ret = False for n in childnode.body: if not isinstance(n, ast.Expr): continue try: func = n.value.func func_name = func.attr if func_name == parentnode.name: ret = isinstance(func.value, ast.Call) break except AttributeError as e: ret = False return ret def visit_FunctionDef(node): """ https://docs.python.org/2/library/ast.html#ast.NodeVisitor.visit """ add_decs = True if node.name in res: add_decs = is_super(mmap[node.name], node) mmap[node.name] = node if add_decs: for n in node.decorator_list: d = {} name = '' args = [] kwargs = {} if isinstance(n, ast.Call): name = n.func.attr if isinstance(n.func, ast.Attribute) else n.func.id for an in n.args: args.append(get_val(an)) for an in n.keywords: kwargs[an.arg] = get_val(an.value) else: name = n.attr if isinstance(n, ast.Attribute) else n.id d = { "name": name, "args": args, "kwargs": kwargs } m = self.module decor = getattr(m, name, None) if decor: 
d["decorator"] = decor #res[node.name].append((name, args, kwargs)) res[node.name].append(self.decorator_class(**d)) node_iter = ast.NodeVisitor() node_iter.visit_FunctionDef = visit_FunctionDef for target_cls in inspect.getmro(self.controller_class): if target_cls == Controller: break node_iter.visit(ast.parse(inspect.getsource(target_cls))) return res
python
def decorators(self): """Get all the decorators of all the option methods in the class http://stackoverflow.com/questions/5910703/ specifically, I used this answer http://stackoverflow.com/a/9580006 """ res = collections.defaultdict(list) mmap = {} def get_val(na, default=None): ret = None if isinstance(na, ast.Num): repr_n = repr(na.n) val = na.n vtype = float if '.' in repr_n else int ret = vtype(val) elif isinstance(na, ast.Str): ret = str(na.s) elif isinstance(na, ast.Name): # http://stackoverflow.com/questions/12700893/ ret = getattr(builtins, na.id, None) if not ret: ret = na.id if ret == 'True': ret = True elif ret == 'False': ret = False elif isinstance(na, ast.Dict): if na.keys: ret = {get_val(na_[0]): get_val(na_[1]) for na_ in zip(na.keys, na.values)} else: ret = {} elif isinstance(na, (ast.List, ast.Tuple)): if na.elts: ret = [get_val(na_) for na_ in na.elts] else: ret = [] if isinstance(na, ast.Tuple): ret = tuple(ret) else: ret = default return ret def is_super(childnode, parentnode): """returns true if child node has a super() call to parent node""" ret = False for n in childnode.body: if not isinstance(n, ast.Expr): continue try: func = n.value.func func_name = func.attr if func_name == parentnode.name: ret = isinstance(func.value, ast.Call) break except AttributeError as e: ret = False return ret def visit_FunctionDef(node): """ https://docs.python.org/2/library/ast.html#ast.NodeVisitor.visit """ add_decs = True if node.name in res: add_decs = is_super(mmap[node.name], node) mmap[node.name] = node if add_decs: for n in node.decorator_list: d = {} name = '' args = [] kwargs = {} if isinstance(n, ast.Call): name = n.func.attr if isinstance(n.func, ast.Attribute) else n.func.id for an in n.args: args.append(get_val(an)) for an in n.keywords: kwargs[an.arg] = get_val(an.value) else: name = n.attr if isinstance(n, ast.Attribute) else n.id d = { "name": name, "args": args, "kwargs": kwargs } m = self.module decor = getattr(m, name, None) if decor: 
d["decorator"] = decor #res[node.name].append((name, args, kwargs)) res[node.name].append(self.decorator_class(**d)) node_iter = ast.NodeVisitor() node_iter.visit_FunctionDef = visit_FunctionDef for target_cls in inspect.getmro(self.controller_class): if target_cls == Controller: break node_iter.visit(ast.parse(inspect.getsource(target_cls))) return res
Get all the decorators of all the option methods in the class http://stackoverflow.com/questions/5910703/ specifically, I used this answer http://stackoverflow.com/a/9580006
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/reflection.py#L134-L247
Jaymon/endpoints
endpoints/reflection.py
ReflectController.desc
def desc(self): """return the description of this endpoint""" doc = inspect.getdoc(self.controller_class) if not doc: doc = '' return doc
python
def desc(self): """return the description of this endpoint""" doc = inspect.getdoc(self.controller_class) if not doc: doc = '' return doc
return the description of this endpoint
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/reflection.py#L277-L281
Jaymon/endpoints
endpoints/reflection.py
ReflectController.methods
def methods(self): """ return the supported http method options that this class supports return what http method options this endpoint supports (eg, POST, GET) http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html :returns: dict, each http method (eg, GET, POST) will have a key with the value being every method from the controller that can satisfy the http method """ ret = {} method_regex = re.compile(r"^[A-Z][A-Z0-9]+(_|$)") controller_methods = inspect.getmembers(self.controller_class) for controller_method_name, controller_method in controller_methods: if controller_method_name.startswith('_'): continue if method_regex.match(controller_method_name): method = self.method_class( controller_method_name, controller_method, controller=self ) ret.setdefault(method.name, []) ret[method.name].append(method) return ret
python
def methods(self): """ return the supported http method options that this class supports return what http method options this endpoint supports (eg, POST, GET) http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html :returns: dict, each http method (eg, GET, POST) will have a key with the value being every method from the controller that can satisfy the http method """ ret = {} method_regex = re.compile(r"^[A-Z][A-Z0-9]+(_|$)") controller_methods = inspect.getmembers(self.controller_class) for controller_method_name, controller_method in controller_methods: if controller_method_name.startswith('_'): continue if method_regex.match(controller_method_name): method = self.method_class( controller_method_name, controller_method, controller=self ) ret.setdefault(method.name, []) ret[method.name].append(method) return ret
return the supported http method options that this class supports return what http method options this endpoint supports (eg, POST, GET) http://www.w3.org/Protocols/rfc2616/rfc2616-sec9.html :returns: dict, each http method (eg, GET, POST) will have a key with the value being every method from the controller that can satisfy the http method
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/reflection.py#L284-L309
Jaymon/endpoints
endpoints/interface/__init__.py
BaseServer.create_call
def create_call(self, raw_request, **kwargs): """create a call object that has endpoints understandable request and response instances""" req = self.create_request(raw_request, **kwargs) res = self.create_response(**kwargs) rou = self.create_router(**kwargs) c = self.call_class(req, res, rou) return c
python
def create_call(self, raw_request, **kwargs): """create a call object that has endpoints understandable request and response instances""" req = self.create_request(raw_request, **kwargs) res = self.create_response(**kwargs) rou = self.create_router(**kwargs) c = self.call_class(req, res, rou) return c
create a call object that has endpoints understandable request and response instances
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/interface/__init__.py#L88-L95
Jaymon/endpoints
endpoints/decorators/limit.py
RateLimitDecorator.target
def target(self, request, key, limit, ttl): """this will only run the request if the key has a value, if you want to fail if the key doesn't have a value, then normalize_key() should raise an exception :param request: Request, the request instance :param key: string, the unique key for the endpoint, this is generated using self.normalize_key, so override that method to customize the key :param limit: int, max requests that should be received in ttl :param ttl: int, how many seconds the request should be throttled (eg, 3600 = 1 hour) """ ret = True if key: #backend = self.create_backend() #method = getattr(backend, "normalize_limit", None) #if method: # limit = method(request, limit) #method = getattr(backend, "normalize_ttl", None) #if method: # ttl = method(request, ttl) #ret = backend.target(request, key, limit, ttl) ret = super(RateLimitDecorator, self).target(request, key, limit, ttl) else: logger.warn("No ratelimit key found for {}".format(request.path)) return ret
python
def target(self, request, key, limit, ttl): """this will only run the request if the key has a value, if you want to fail if the key doesn't have a value, then normalize_key() should raise an exception :param request: Request, the request instance :param key: string, the unique key for the endpoint, this is generated using self.normalize_key, so override that method to customize the key :param limit: int, max requests that should be received in ttl :param ttl: int, how many seconds the request should be throttled (eg, 3600 = 1 hour) """ ret = True if key: #backend = self.create_backend() #method = getattr(backend, "normalize_limit", None) #if method: # limit = method(request, limit) #method = getattr(backend, "normalize_ttl", None) #if method: # ttl = method(request, ttl) #ret = backend.target(request, key, limit, ttl) ret = super(RateLimitDecorator, self).target(request, key, limit, ttl) else: logger.warn("No ratelimit key found for {}".format(request.path)) return ret
this will only run the request if the key has a value, if you want to fail if the key doesn't have a value, then normalize_key() should raise an exception :param request: Request, the request instance :param key: string, the unique key for the endpoint, this is generated using self.normalize_key, so override that method to customize the key :param limit: int, max requests that should be received in ttl :param ttl: int, how many seconds the request should be throttled (eg, 3600 = 1 hour)
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/decorators/limit.py#L121-L146
Jaymon/endpoints
endpoints/decorators/limit.py
RateLimitDecorator.decorate
def decorate(self, func, limit=0, ttl=0, *anoop, **kwnoop): """see target for an explanation of limit and ttl""" self.limit = int(limit) self.ttl = int(ttl) return super(RateLimitDecorator, self).decorate(func, target=None, *anoop, **kwnoop)
python
def decorate(self, func, limit=0, ttl=0, *anoop, **kwnoop): """see target for an explanation of limit and ttl""" self.limit = int(limit) self.ttl = int(ttl) return super(RateLimitDecorator, self).decorate(func, target=None, *anoop, **kwnoop)
see target for an explanation of limit and ttl
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/decorators/limit.py#L152-L156
Jaymon/endpoints
endpoints/decorators/limit.py
ratelimit.decorate
def decorate(self, func, limit, ttl, *anoop, **kwnoop): """make limit and ttl required""" return super(ratelimit, self).decorate(func, limit, ttl, *anoop, **kwnoop)
python
def decorate(self, func, limit, ttl, *anoop, **kwnoop): """make limit and ttl required""" return super(ratelimit, self).decorate(func, limit, ttl, *anoop, **kwnoop)
make limit and ttl required
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/decorators/limit.py#L179-L181
Jaymon/endpoints
endpoints/bin/wsgiserver.py
console
def console(): ''' cli hook return -- integer -- the exit code ''' parser = argparse.ArgumentParser(description='Start an endpoints WSGI server', add_help=True) #parser.add_argument('--debug', dest='debug', action='store_true', help='print debugging info') parser.add_argument( "-v", "--version", action='version', version="%(prog)s {}".format(endpoints.__version__) ) parser.add_argument( "--quiet", action='store_true', dest='quiet' ) parser.add_argument( '--prefix', "--controller-prefix", "-P", required=True, help='The endpoints controller prefix' ) parser.add_argument( '--file', "-F", "--wsgi-file", "--wsgifile", dest="file", default="", help='The wsgi file, the file that has an application callable' ) parser.add_argument( '--host', "-H", required=True, help='The host to serve on in the form host:port' ) parser.add_argument( '--count', "-C", help='How many requests to process until self termination', type=int, default=0 ) parser.add_argument( '--dir', "-D", "--directory", dest="directory", default=os.getcwd(), help='directory to run the server in, usually contains the prefix module path', ) # parser.add_argument( # '--config', "--config-script", "-S", # dest="config_script", # default="", # help='This script will be loaded before Server is created allowing custom configuration', # ) args = parser.parse_args() # we want to make sure the directory can be imported from since chances are # the prefix module lives in that directory sys.path.append(args.directory) if not args.quiet: # https://docs.python.org/2.7/library/logging.html#logging.basicConfig logging.basicConfig(format="%(message)s", level=logging.DEBUG, stream=sys.stdout) logger = logging.getLogger(__name__) os.environ["ENDPOINTS_HOST"] = args.host environ.HOST = args.host os.environ["ENDPOINTS_PREFIX"] = args.prefix #environ.PREFIXES = args.prefix config = {} if args.file: # load the configuration file config = runpy.run_path(args.file) # if args.config_script: # # load a config script so you can customize 
the environment # h = "wsgiserver_config_{}".format(uuid.uuid4()) # config_module = imp.load_source(h, args.config_script) s = Server() if "application" in config: s.application = config["application"] if args.count: logger.info("Listening on {} for {} requests".format(args.host, args.prefix)) s.serve_count(args.count) else: logger.info("Listening on {}".format(args.host)) s.serve_forever() return 0
python
def console(): ''' cli hook return -- integer -- the exit code ''' parser = argparse.ArgumentParser(description='Start an endpoints WSGI server', add_help=True) #parser.add_argument('--debug', dest='debug', action='store_true', help='print debugging info') parser.add_argument( "-v", "--version", action='version', version="%(prog)s {}".format(endpoints.__version__) ) parser.add_argument( "--quiet", action='store_true', dest='quiet' ) parser.add_argument( '--prefix', "--controller-prefix", "-P", required=True, help='The endpoints controller prefix' ) parser.add_argument( '--file', "-F", "--wsgi-file", "--wsgifile", dest="file", default="", help='The wsgi file, the file that has an application callable' ) parser.add_argument( '--host', "-H", required=True, help='The host to serve on in the form host:port' ) parser.add_argument( '--count', "-C", help='How many requests to process until self termination', type=int, default=0 ) parser.add_argument( '--dir', "-D", "--directory", dest="directory", default=os.getcwd(), help='directory to run the server in, usually contains the prefix module path', ) # parser.add_argument( # '--config', "--config-script", "-S", # dest="config_script", # default="", # help='This script will be loaded before Server is created allowing custom configuration', # ) args = parser.parse_args() # we want to make sure the directory can be imported from since chances are # the prefix module lives in that directory sys.path.append(args.directory) if not args.quiet: # https://docs.python.org/2.7/library/logging.html#logging.basicConfig logging.basicConfig(format="%(message)s", level=logging.DEBUG, stream=sys.stdout) logger = logging.getLogger(__name__) os.environ["ENDPOINTS_HOST"] = args.host environ.HOST = args.host os.environ["ENDPOINTS_PREFIX"] = args.prefix #environ.PREFIXES = args.prefix config = {} if args.file: # load the configuration file config = runpy.run_path(args.file) # if args.config_script: # # load a config script so you can customize 
the environment # h = "wsgiserver_config_{}".format(uuid.uuid4()) # config_module = imp.load_source(h, args.config_script) s = Server() if "application" in config: s.application = config["application"] if args.count: logger.info("Listening on {} for {} requests".format(args.host, args.prefix)) s.serve_count(args.count) else: logger.info("Listening on {}".format(args.host)) s.serve_forever() return 0
cli hook return -- integer -- the exit code
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/bin/wsgiserver.py#L22-L113
Jaymon/endpoints
endpoints/utils.py
Base64.encode
def encode(cls, s): """converts a plain text string to base64 encoding :param s: unicode str|bytes, the base64 encoded string :returns: unicode str """ b = ByteString(s) be = base64.b64encode(b).strip() return String(be)
python
def encode(cls, s): """converts a plain text string to base64 encoding :param s: unicode str|bytes, the base64 encoded string :returns: unicode str """ b = ByteString(s) be = base64.b64encode(b).strip() return String(be)
converts a plain text string to base64 encoding :param s: unicode str|bytes, the base64 encoded string :returns: unicode str
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/utils.py#L119-L127
Jaymon/endpoints
endpoints/utils.py
Base64.decode
def decode(cls, s): """decodes a base64 string to plain text :param s: unicode str|bytes, the base64 encoded string :returns: unicode str """ b = ByteString(s) bd = base64.b64decode(b) return String(bd)
python
def decode(cls, s): """decodes a base64 string to plain text :param s: unicode str|bytes, the base64 encoded string :returns: unicode str """ b = ByteString(s) bd = base64.b64decode(b) return String(bd)
decodes a base64 string to plain text :param s: unicode str|bytes, the base64 encoded string :returns: unicode str
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/utils.py#L130-L138
Jaymon/endpoints
endpoints/utils.py
MimeType.find_type
def find_type(cls, val): """return the mimetype from the given string value if value is a path, then the extension will be found, if val is an extension then that will be used to find the mimetype """ mt = "" index = val.rfind(".") if index == -1: val = "fake.{}".format(val) elif index == 0: val = "fake{}".format(val) mt = mimetypes.guess_type(val)[0] if mt is None: mt = "" return mt
python
def find_type(cls, val): """return the mimetype from the given string value if value is a path, then the extension will be found, if val is an extension then that will be used to find the mimetype """ mt = "" index = val.rfind(".") if index == -1: val = "fake.{}".format(val) elif index == 0: val = "fake{}".format(val) mt = mimetypes.guess_type(val)[0] if mt is None: mt = "" return mt
return the mimetype from the given string value if value is a path, then the extension will be found, if val is an extension then that will be used to find the mimetype
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/utils.py#L154-L171
Jaymon/endpoints
endpoints/utils.py
AcceptHeader._sort
def _sort(self, a, b): ''' sort the headers according to rfc 2616 so when __iter__ is called, the accept media types are in order from most preferred to least preferred ''' ret = 0 # first we check q, higher values win: if a[1] != b[1]: ret = cmp(a[1], b[1]) else: found = False for i in range(2): ai = a[0][i] bi = b[0][i] if ai == '*': if bi != '*': ret = -1 found = True break else: # both *, more verbose params win ret = cmp(len(a[2]), len(b[2])) found = True break elif bi == '*': ret = 1 found = True break if not found: ret = cmp(len(a[2]), len(b[2])) return ret
python
def _sort(self, a, b): ''' sort the headers according to rfc 2616 so when __iter__ is called, the accept media types are in order from most preferred to least preferred ''' ret = 0 # first we check q, higher values win: if a[1] != b[1]: ret = cmp(a[1], b[1]) else: found = False for i in range(2): ai = a[0][i] bi = b[0][i] if ai == '*': if bi != '*': ret = -1 found = True break else: # both *, more verbose params win ret = cmp(len(a[2]), len(b[2])) found = True break elif bi == '*': ret = 1 found = True break if not found: ret = cmp(len(a[2]), len(b[2])) return ret
sort the headers according to rfc 2616 so when __iter__ is called, the accept media types are in order from most preferred to least preferred
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/utils.py#L211-L244
Jaymon/endpoints
endpoints/utils.py
AcceptHeader.filter
def filter(self, media_type, **params): """ iterate all the accept media types that match media_type media_type -- string -- the media type to filter by **params -- dict -- further filter by key: val return -- generator -- yields all matching media type info things """ mtype, msubtype = self._split_media_type(media_type) for x in self.__iter__(): # all the params have to match to make the media type valid matched = True for k, v in params.items(): if x[2].get(k, None) != v: matched = False break if matched: if x[0][0] == '*': if x[0][1] == '*': yield x elif x[0][1] == msubtype: yield x elif mtype == '*': if msubtype == '*': yield x elif x[0][1] == msubtype: yield x elif x[0][0] == mtype: if msubtype == '*': yield x elif x[0][1] == '*': yield x elif x[0][1] == msubtype: yield x
python
def filter(self, media_type, **params): """ iterate all the accept media types that match media_type media_type -- string -- the media type to filter by **params -- dict -- further filter by key: val return -- generator -- yields all matching media type info things """ mtype, msubtype = self._split_media_type(media_type) for x in self.__iter__(): # all the params have to match to make the media type valid matched = True for k, v in params.items(): if x[2].get(k, None) != v: matched = False break if matched: if x[0][0] == '*': if x[0][1] == '*': yield x elif x[0][1] == msubtype: yield x elif mtype == '*': if msubtype == '*': yield x elif x[0][1] == msubtype: yield x elif x[0][0] == mtype: if msubtype == '*': yield x elif x[0][1] == '*': yield x elif x[0][1] == msubtype: yield x
iterate all the accept media types that match media_type media_type -- string -- the media type to filter by **params -- dict -- further filter by key: val return -- generator -- yields all matching media type info things
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/utils.py#L259-L300
Jaymon/endpoints
endpoints/interface/wsgi/__init__.py
Application.create_request
def create_request(self, raw_request, **kwargs): """ create instance of request raw_request -- the raw request object retrieved from a WSGI server """ r = self.request_class() for k, v in raw_request.items(): if k.startswith('HTTP_'): r.set_header(k[5:], v) else: r.environ[k] = v r.method = raw_request['REQUEST_METHOD'] r.path = raw_request['PATH_INFO'] r.query = raw_request['QUERY_STRING'] # handle headers not prefixed with http for k, t in {'CONTENT_TYPE': None, 'CONTENT_LENGTH': int}.items(): v = r.environ.pop(k, None) if v: r.set_header(k, t(v) if t else v) if 'wsgi.input' in raw_request: if "CONTENT_LENGTH" in raw_request and int(r.get_header("CONTENT_LENGTH", 0)) <= 0: r.body_kwargs = {} else: if r.get_header('transfer-encoding', "").lower().startswith('chunked'): raise IOError("Server does not support chunked requests") else: r.body_input = raw_request['wsgi.input'] else: r.body_kwargs = {} return r
python
def create_request(self, raw_request, **kwargs): """ create instance of request raw_request -- the raw request object retrieved from a WSGI server """ r = self.request_class() for k, v in raw_request.items(): if k.startswith('HTTP_'): r.set_header(k[5:], v) else: r.environ[k] = v r.method = raw_request['REQUEST_METHOD'] r.path = raw_request['PATH_INFO'] r.query = raw_request['QUERY_STRING'] # handle headers not prefixed with http for k, t in {'CONTENT_TYPE': None, 'CONTENT_LENGTH': int}.items(): v = r.environ.pop(k, None) if v: r.set_header(k, t(v) if t else v) if 'wsgi.input' in raw_request: if "CONTENT_LENGTH" in raw_request and int(r.get_header("CONTENT_LENGTH", 0)) <= 0: r.body_kwargs = {} else: if r.get_header('transfer-encoding', "").lower().startswith('chunked'): raise IOError("Server does not support chunked requests") else: r.body_input = raw_request['wsgi.input'] else: r.body_kwargs = {} return r
create instance of request raw_request -- the raw request object retrieved from a WSGI server
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/interface/wsgi/__init__.py#L51-L89
Jaymon/endpoints
endpoints/interface/wsgi/__init__.py
Server.application
def application(self, v): """allow overriding of the application factory, this allows you to set your own application callable that will be used to handle requests, see bin/wsgiserver.py script as an example of usage""" self._application = v self.backend.set_app(v)
python
def application(self, v): """allow overriding of the application factory, this allows you to set your own application callable that will be used to handle requests, see bin/wsgiserver.py script as an example of usage""" self._application = v self.backend.set_app(v)
allow overriding of the application factory, this allows you to set your own application callable that will be used to handle requests, see bin/wsgiserver.py script as an example of usage
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/interface/wsgi/__init__.py#L127-L132
Jaymon/endpoints
endpoints/interface/uwsgi/async.py
WebsocketApplication.create_environ
def create_environ(self, req, payload): """This will take the original request and the new websocket payload and merge them into a new request instance""" ws_req = req.copy() del ws_req.controller_info ws_req.environ.pop("wsgi.input", None) ws_req.body_kwargs = payload.body ws_req.environ["REQUEST_METHOD"] = payload.method ws_req.method = payload.method ws_req.environ["PATH_INFO"] = payload.path ws_req.path = payload.path ws_req.environ["WS_PAYLOAD"] = payload ws_req.environ["WS_ORIGINAL"] = req ws_req.payload = payload ws_req.parent = req return {"WS_REQUEST": ws_req}
python
def create_environ(self, req, payload): """This will take the original request and the new websocket payload and merge them into a new request instance""" ws_req = req.copy() del ws_req.controller_info ws_req.environ.pop("wsgi.input", None) ws_req.body_kwargs = payload.body ws_req.environ["REQUEST_METHOD"] = payload.method ws_req.method = payload.method ws_req.environ["PATH_INFO"] = payload.path ws_req.path = payload.path ws_req.environ["WS_PAYLOAD"] = payload ws_req.environ["WS_ORIGINAL"] = req ws_req.payload = payload ws_req.parent = req return {"WS_REQUEST": ws_req}
This will take the original request and the new websocket payload and merge them into a new request instance
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/interface/uwsgi/async.py#L126-L147
Jaymon/endpoints
endpoints/interface/wsgi/client.py
find_module_path
def find_module_path(): """find where the master module is located""" master_modname = __name__.split(".", 1)[0] master_module = sys.modules[master_modname] #return os.path.dirname(os.path.realpath(os.path.join(inspect.getsourcefile(endpoints), ".."))) path = os.path.dirname(inspect.getsourcefile(master_module)) return path
python
def find_module_path(): """find where the master module is located""" master_modname = __name__.split(".", 1)[0] master_module = sys.modules[master_modname] #return os.path.dirname(os.path.realpath(os.path.join(inspect.getsourcefile(endpoints), ".."))) path = os.path.dirname(inspect.getsourcefile(master_module)) return path
find where the master module is located
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/interface/wsgi/client.py#L17-L23
Jaymon/endpoints
endpoints/http.py
Headers._convert_string_name
def _convert_string_name(self, k): """converts things like FOO_BAR to Foo-Bar which is the normal form""" k = String(k, "iso-8859-1") klower = k.lower().replace('_', '-') bits = klower.split('-') return "-".join((bit.title() for bit in bits))
python
def _convert_string_name(self, k): """converts things like FOO_BAR to Foo-Bar which is the normal form""" k = String(k, "iso-8859-1") klower = k.lower().replace('_', '-') bits = klower.split('-') return "-".join((bit.title() for bit in bits))
converts things like FOO_BAR to Foo-Bar which is the normal form
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L50-L55
Jaymon/endpoints
endpoints/http.py
Url.uri
def uri(self): """return the uri, which is everything but base (no scheme, host, etc)""" uristring = self.path if self.query: uristring += "?{}".format(self.query) if self.fragment: uristring += "#{}".format(self.fragment) return uristring
python
def uri(self): """return the uri, which is everything but base (no scheme, host, etc)""" uristring = self.path if self.query: uristring += "?{}".format(self.query) if self.fragment: uristring += "#{}".format(self.fragment) return uristring
return the uri, which is everything but base (no scheme, host, etc)
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L271-L279
Jaymon/endpoints
endpoints/http.py
Url.parse_query
def parse_query(cls, query): """return name=val&name2=val2 strings into {name: val} dict""" if not query: return {} d = {} # https://docs.python.org/2/library/urlparse.html for k, kv in urlparse.parse_qs(query, True, strict_parsing=True).items(): #k = k.rstrip("[]") # strip out php type array designated variables if len(kv) > 1: d[k] = kv else: d[k] = kv[0] return d
python
def parse_query(cls, query): """return name=val&name2=val2 strings into {name: val} dict""" if not query: return {} d = {} # https://docs.python.org/2/library/urlparse.html for k, kv in urlparse.parse_qs(query, True, strict_parsing=True).items(): #k = k.rstrip("[]") # strip out php type array designated variables if len(kv) > 1: d[k] = kv else: d[k] = kv[0] return d
return name=val&name2=val2 strings into {name: val} dict
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L434-L447
Jaymon/endpoints
endpoints/http.py
Url._normalize_params
def _normalize_params(self, *paths, **query_kwargs): """a lot of the helper methods are very similar, this handles their arguments""" kwargs = {} if paths: fragment = paths[-1] if fragment: if fragment.startswith("#"): kwargs["fragment"] = fragment paths.pop(-1) kwargs["path"] = "/".join(self.normalize_paths(*paths)) kwargs["query_kwargs"] = query_kwargs return kwargs
python
def _normalize_params(self, *paths, **query_kwargs): """a lot of the helper methods are very similar, this handles their arguments""" kwargs = {} if paths: fragment = paths[-1] if fragment: if fragment.startswith("#"): kwargs["fragment"] = fragment paths.pop(-1) kwargs["path"] = "/".join(self.normalize_paths(*paths)) kwargs["query_kwargs"] = query_kwargs return kwargs
a lot of the helper methods are very similar, this handles their arguments
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L465-L479
Jaymon/endpoints
endpoints/http.py
Url.split_hostname_from_port
def split_hostname_from_port(cls, hostname): """given a hostname:port return a tuple (hostname, port)""" bits = hostname.split(":", 2) p = None d = bits[0] if len(bits) == 2: p = int(bits[1]) return d, p
python
def split_hostname_from_port(cls, hostname): """given a hostname:port return a tuple (hostname, port)""" bits = hostname.split(":", 2) p = None d = bits[0] if len(bits) == 2: p = int(bits[1]) return d, p
given a hostname:port return a tuple (hostname, port)
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L482-L490
Jaymon/endpoints
endpoints/http.py
Url.add
def add(self, **kwargs): """Just a shortcut to change the current url, equivalent to Url(self, **kwargs)""" if "path" in kwargs: path = kwargs["path"] if isinstance(path, bytes): path = String(path) if not path[0].startswith("/"): paths = self.normalize_paths(self.path, path) else: paths = self.normalize_paths(path) kwargs["path"] = "/".join(paths) return self.create(self, **kwargs)
python
def add(self, **kwargs): """Just a shortcut to change the current url, equivalent to Url(self, **kwargs)""" if "path" in kwargs: path = kwargs["path"] if isinstance(path, bytes): path = String(path) if not path[0].startswith("/"): paths = self.normalize_paths(self.path, path) else: paths = self.normalize_paths(path) kwargs["path"] = "/".join(paths) return self.create(self, **kwargs)
Just a shortcut to change the current url, equivalent to Url(self, **kwargs)
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L495-L506
Jaymon/endpoints
endpoints/http.py
Url.controller
def controller(self, *paths, **query_kwargs): """create a new url object using the controller path as a base if you have a controller `foo.BarController` then this would create a new Url instance with `host/foo/bar` as the base path, so any *paths will be appended to `/foo/bar` :example: # controller foo.BarController print url # http://host.com/foo/bar/some_random_path print url.controller() # http://host.com/foo/bar print url.controller("che", boom="bam") # http://host/foo/bar/che?boom=bam :param *paths: list, the paths to append to the controller path :param **query_kwargs: dict, any query string params to add """ kwargs = self._normalize_params(*paths, **query_kwargs) if self.controller_path: if "path" in kwargs: paths = self.normalize_paths(self.controller_path, kwargs["path"]) kwargs["path"] = "/".join(paths) else: kwargs["path"] = self.controller_path return self.create(self.root, **kwargs)
python
def controller(self, *paths, **query_kwargs): """create a new url object using the controller path as a base if you have a controller `foo.BarController` then this would create a new Url instance with `host/foo/bar` as the base path, so any *paths will be appended to `/foo/bar` :example: # controller foo.BarController print url # http://host.com/foo/bar/some_random_path print url.controller() # http://host.com/foo/bar print url.controller("che", boom="bam") # http://host/foo/bar/che?boom=bam :param *paths: list, the paths to append to the controller path :param **query_kwargs: dict, any query string params to add """ kwargs = self._normalize_params(*paths, **query_kwargs) if self.controller_path: if "path" in kwargs: paths = self.normalize_paths(self.controller_path, kwargs["path"]) kwargs["path"] = "/".join(paths) else: kwargs["path"] = self.controller_path return self.create(self.root, **kwargs)
create a new url object using the controller path as a base if you have a controller `foo.BarController` then this would create a new Url instance with `host/foo/bar` as the base path, so any *paths will be appended to `/foo/bar` :example: # controller foo.BarController print url # http://host.com/foo/bar/some_random_path print url.controller() # http://host.com/foo/bar print url.controller("che", boom="bam") # http://host/foo/bar/che?boom=bam :param *paths: list, the paths to append to the controller path :param **query_kwargs: dict, any query string params to add
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L535-L560
Jaymon/endpoints
endpoints/http.py
Url.base
def base(self, *paths, **query_kwargs): """create a new url object using the current base path as a base if you had requested /foo/bar, then this would append *paths and **query_kwargs to /foo/bar :example: # current path: /foo/bar print url # http://host.com/foo/bar print url.base() # http://host.com/foo/bar print url.base("che", boom="bam") # http://host/foo/bar/che?boom=bam :param *paths: list, the paths to append to the current path without query params :param **query_kwargs: dict, any query string params to add """ kwargs = self._normalize_params(*paths, **query_kwargs) if self.path: if "path" in kwargs: paths = self.normalize_paths(self.path, kwargs["path"]) kwargs["path"] = "/".join(paths) else: kwargs["path"] = self.path return self.create(self.root, **kwargs)
python
def base(self, *paths, **query_kwargs): """create a new url object using the current base path as a base if you had requested /foo/bar, then this would append *paths and **query_kwargs to /foo/bar :example: # current path: /foo/bar print url # http://host.com/foo/bar print url.base() # http://host.com/foo/bar print url.base("che", boom="bam") # http://host/foo/bar/che?boom=bam :param *paths: list, the paths to append to the current path without query params :param **query_kwargs: dict, any query string params to add """ kwargs = self._normalize_params(*paths, **query_kwargs) if self.path: if "path" in kwargs: paths = self.normalize_paths(self.path, kwargs["path"]) kwargs["path"] = "/".join(paths) else: kwargs["path"] = self.path return self.create(self.root, **kwargs)
create a new url object using the current base path as a base if you had requested /foo/bar, then this would append *paths and **query_kwargs to /foo/bar :example: # current path: /foo/bar print url # http://host.com/foo/bar print url.base() # http://host.com/foo/bar print url.base("che", boom="bam") # http://host/foo/bar/che?boom=bam :param *paths: list, the paths to append to the current path without query params :param **query_kwargs: dict, any query string params to add
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L562-L586
Jaymon/endpoints
endpoints/http.py
Url.host
def host(self, *paths, **query_kwargs): """create a new url object using the host as a base if you had requested http://host/foo/bar, then this would append *paths and **query_kwargs to http://host :example: # current url: http://host/foo/bar print url # http://host.com/foo/bar print url.host_url() # http://host.com/ print url.host_url("che", boom="bam") # http://host/che?boom=bam :param *paths: list, the paths to append to the current path without query params :param **query_kwargs: dict, any query string params to add """ kwargs = self._normalize_params(*paths, **query_kwargs) return self.create(self.root, **kwargs)
python
def host(self, *paths, **query_kwargs): """create a new url object using the host as a base if you had requested http://host/foo/bar, then this would append *paths and **query_kwargs to http://host :example: # current url: http://host/foo/bar print url # http://host.com/foo/bar print url.host_url() # http://host.com/ print url.host_url("che", boom="bam") # http://host/che?boom=bam :param *paths: list, the paths to append to the current path without query params :param **query_kwargs: dict, any query string params to add """ kwargs = self._normalize_params(*paths, **query_kwargs) return self.create(self.root, **kwargs)
create a new url object using the host as a base if you had requested http://host/foo/bar, then this would append *paths and **query_kwargs to http://host :example: # current url: http://host/foo/bar print url # http://host.com/foo/bar print url.host_url() # http://host.com/ print url.host_url("che", boom="bam") # http://host/che?boom=bam :param *paths: list, the paths to append to the current path without query params :param **query_kwargs: dict, any query string params to add
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L588-L606
Jaymon/endpoints
endpoints/http.py
Request.accept_encoding
def accept_encoding(self): """The encoding the client requested the response to use""" # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Charset ret = "" accept_encoding = self.get_header("Accept-Charset", "") if accept_encoding: bits = re.split(r"\s+", accept_encoding) bits = bits[0].split(";") ret = bits[0] return ret
python
def accept_encoding(self): """The encoding the client requested the response to use""" # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Charset ret = "" accept_encoding = self.get_header("Accept-Charset", "") if accept_encoding: bits = re.split(r"\s+", accept_encoding) bits = bits[0].split(";") ret = bits[0] return ret
The encoding the client requested the response to use
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L816-L825
Jaymon/endpoints
endpoints/http.py
Request.encoding
def encoding(self): """the character encoding of the request, usually only set in POST type requests""" encoding = None ct = self.get_header('content-type') if ct: ah = AcceptHeader(ct) if ah.media_types: encoding = ah.media_types[0][2].get("charset", None) return encoding
python
def encoding(self): """the character encoding of the request, usually only set in POST type requests""" encoding = None ct = self.get_header('content-type') if ct: ah = AcceptHeader(ct) if ah.media_types: encoding = ah.media_types[0][2].get("charset", None) return encoding
the character encoding of the request, usually only set in POST type requests
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L828-L837
Jaymon/endpoints
endpoints/http.py
Request.access_token
def access_token(self): """return an Oauth 2.0 Bearer access token if it can be found""" access_token = self.get_auth_bearer() if not access_token: access_token = self.query_kwargs.get('access_token', '') if not access_token: access_token = self.body_kwargs.get('access_token', '') return access_token
python
def access_token(self): """return an Oauth 2.0 Bearer access token if it can be found""" access_token = self.get_auth_bearer() if not access_token: access_token = self.query_kwargs.get('access_token', '') if not access_token: access_token = self.body_kwargs.get('access_token', '') return access_token
return an Oauth 2.0 Bearer access token if it can be found
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L840-L848
Jaymon/endpoints
endpoints/http.py
Request.client_tokens
def client_tokens(self): """try and get Oauth 2.0 client id and secret first from basic auth header, then from GET or POST parameters return -- tuple -- client_id, client_secret """ client_id, client_secret = self.get_auth_basic() if not client_id and not client_secret: client_id = self.query_kwargs.get('client_id', '') client_secret = self.query_kwargs.get('client_secret', '') if not client_id and not client_secret: client_id = self.body_kwargs.get('client_id', '') client_secret = self.body_kwargs.get('client_secret', '') return client_id, client_secret
python
def client_tokens(self): """try and get Oauth 2.0 client id and secret first from basic auth header, then from GET or POST parameters return -- tuple -- client_id, client_secret """ client_id, client_secret = self.get_auth_basic() if not client_id and not client_secret: client_id = self.query_kwargs.get('client_id', '') client_secret = self.query_kwargs.get('client_secret', '') if not client_id and not client_secret: client_id = self.body_kwargs.get('client_id', '') client_secret = self.body_kwargs.get('client_secret', '') return client_id, client_secret
try and get Oauth 2.0 client id and secret first from basic auth header, then from GET or POST parameters return -- tuple -- client_id, client_secret
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L851-L865
Jaymon/endpoints
endpoints/http.py
Request.ips
def ips(self): """return all the possible ips of this request, this will include public and private ips""" r = [] names = ['X_FORWARDED_FOR', 'CLIENT_IP', 'X_REAL_IP', 'X_FORWARDED', 'X_CLUSTER_CLIENT_IP', 'FORWARDED_FOR', 'FORWARDED', 'VIA', 'REMOTE_ADDR'] for name in names: vs = self.get_header(name, '') if vs: r.extend(map(lambda v: v.strip(), vs.split(','))) vs = self.environ.get(name, '') if vs: r.extend(map(lambda v: v.strip(), vs.split(','))) return r
python
def ips(self): """return all the possible ips of this request, this will include public and private ips""" r = [] names = ['X_FORWARDED_FOR', 'CLIENT_IP', 'X_REAL_IP', 'X_FORWARDED', 'X_CLUSTER_CLIENT_IP', 'FORWARDED_FOR', 'FORWARDED', 'VIA', 'REMOTE_ADDR'] for name in names: vs = self.get_header(name, '') if vs: r.extend(map(lambda v: v.strip(), vs.split(','))) vs = self.environ.get(name, '') if vs: r.extend(map(lambda v: v.strip(), vs.split(','))) return r
return all the possible ips of this request, this will include public and private ips
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L868-L884
Jaymon/endpoints
endpoints/http.py
Request.ip
def ip(self): """return the public ip address""" r = '' # this was compiled from here: # https://github.com/un33k/django-ipware # http://www.ietf.org/rfc/rfc3330.txt (IPv4) # http://www.ietf.org/rfc/rfc5156.txt (IPv6) # https://en.wikipedia.org/wiki/Reserved_IP_addresses format_regex = re.compile(r'\s') ip_regex = re.compile(r'^(?:{})'.format(r'|'.join([ r'0\.', # reserved for 'self-identification' r'10\.', # class A r'169\.254', # link local block r'172\.(?:1[6-9]|2[0-9]|3[0-1])\.', # class B r'192\.0\.2\.', # documentation/examples r'192\.168', # class C r'255\.{3}', # broadcast address r'2001\:db8', # documentation/examples r'fc00\:', # private r'fe80\:', # link local unicast r'ff00\:', # multicast r'127\.', # localhost r'\:\:1' # localhost ]))) ips = self.ips for ip in ips: if not format_regex.search(ip) and not ip_regex.match(ip): r = ip break return r
python
def ip(self): """return the public ip address""" r = '' # this was compiled from here: # https://github.com/un33k/django-ipware # http://www.ietf.org/rfc/rfc3330.txt (IPv4) # http://www.ietf.org/rfc/rfc5156.txt (IPv6) # https://en.wikipedia.org/wiki/Reserved_IP_addresses format_regex = re.compile(r'\s') ip_regex = re.compile(r'^(?:{})'.format(r'|'.join([ r'0\.', # reserved for 'self-identification' r'10\.', # class A r'169\.254', # link local block r'172\.(?:1[6-9]|2[0-9]|3[0-1])\.', # class B r'192\.0\.2\.', # documentation/examples r'192\.168', # class C r'255\.{3}', # broadcast address r'2001\:db8', # documentation/examples r'fc00\:', # private r'fe80\:', # link local unicast r'ff00\:', # multicast r'127\.', # localhost r'\:\:1' # localhost ]))) ips = self.ips for ip in ips: if not format_regex.search(ip) and not ip_regex.match(ip): r = ip break return r
return the public ip address
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L887-L919
Jaymon/endpoints
endpoints/http.py
Request.url
def url(self): """return the full request url as an Url() instance""" scheme = self.scheme host = self.host path = self.path query = self.query port = self.port # normalize the port host_domain, host_port = Url.split_hostname_from_port(host) if host_port: port = host_port controller_path = "" if self.controller_info: controller_path = self.controller_info.get("path", "") u = Url( scheme=scheme, hostname=host, path=path, query=query, port=port, controller_path=controller_path, ) return u
python
def url(self): """return the full request url as an Url() instance""" scheme = self.scheme host = self.host path = self.path query = self.query port = self.port # normalize the port host_domain, host_port = Url.split_hostname_from_port(host) if host_port: port = host_port controller_path = "" if self.controller_info: controller_path = self.controller_info.get("path", "") u = Url( scheme=scheme, hostname=host, path=path, query=query, port=port, controller_path=controller_path, ) return u
return the full request url as an Url() instance
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L943-L968
Jaymon/endpoints
endpoints/http.py
Request.path
def path(self): """path part of a url (eg, http://host.com/path?query=string)""" self._path = '' path_args = self.path_args path = "/{}".format("/".join(path_args)) return path
python
def path(self): """path part of a url (eg, http://host.com/path?query=string)""" self._path = '' path_args = self.path_args path = "/{}".format("/".join(path_args)) return path
path part of a url (eg, http://host.com/path?query=string)
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L971-L976
Jaymon/endpoints
endpoints/http.py
Request.path_args
def path_args(self): """the path converted to list (eg /foo/bar becomes [foo, bar])""" self._path_args = [] path = self.path path_args = list(filter(None, path.split('/'))) return path_args
python
def path_args(self): """the path converted to list (eg /foo/bar becomes [foo, bar])""" self._path_args = [] path = self.path path_args = list(filter(None, path.split('/'))) return path_args
the path converted to list (eg /foo/bar becomes [foo, bar])
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L979-L984
Jaymon/endpoints
endpoints/http.py
Request.query
def query(self): """query_string part of a url (eg, http://host.com/path?query=string)""" self._query = query = "" query_kwargs = self.query_kwargs if query_kwargs: query = urlencode(query_kwargs, doseq=True) return query
python
def query(self): """query_string part of a url (eg, http://host.com/path?query=string)""" self._query = query = "" query_kwargs = self.query_kwargs if query_kwargs: query = urlencode(query_kwargs, doseq=True) return query
query_string part of a url (eg, http://host.com/path?query=string)
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L987-L993
Jaymon/endpoints
endpoints/http.py
Request.query_kwargs
def query_kwargs(self): """{foo: bar, baz: che}""" self._query_kwargs = query_kwargs = {} query = self.query if query: query_kwargs = self._parse_query_str(query) return query_kwargs
python
def query_kwargs(self): """{foo: bar, baz: che}""" self._query_kwargs = query_kwargs = {} query = self.query if query: query_kwargs = self._parse_query_str(query) return query_kwargs
{foo: bar, baz: che}
https://github.com/Jaymon/endpoints/blob/2f1c4ae2c69a168e69447d3d8395ada7becaa5fb/endpoints/http.py#L996-L1001