sentence1 | sentence2 | label |
---|---|---|
def romanNumeral(n):
"""
>>> romanNumeral(13)
'XIII'
>>> romanNumeral(2944)
'MMCMXLIV'
"""
    if not 0 < n < 4000: raise ValueError('``n`` must lie between 1 and 3999: %d' % n)
roman = 'I IV V IX X XL L XC C CD D CM M'.split()
arabic = [1, 4, 5, 9, 10, 40, 50, 90, 100, 400, 500, 900, 1000]
res = []
while n>0:
pos = bisect.bisect_right(arabic, n)-1
fit = n//arabic[pos]
res.append(roman[pos]*fit); n -= fit * arabic[pos]
return "".join(res) | >>> romanNumeral(13)
'XIII'
>>> romanNumeral(2944)
'MMCMXLIV' | entailment |
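A short illustrative trace of the first step of the bisect-driven greedy loop above, for n=2944 (everything below restates values already in the function; nothing new is assumed):

import bisect
arabic = [1, 4, 5, 9, 10, 40, 50, 90, 100, 400, 500, 900, 1000]
n = 2944
pos = bisect.bisect_right(arabic, n) - 1
assert arabic[pos] == 1000 and n // arabic[pos] == 2  # first step emits 'MM', leaving 944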
def first(n, it, constructor=list):
"""
>>> first(3,iter([1,2,3,4]))
[1, 2, 3]
>>> first(3,iter([1,2,3,4]), iter) #doctest: +ELLIPSIS
<itertools.islice object at ...>
>>> first(3,iter([1,2,3,4]), tuple)
(1, 2, 3)
"""
return constructor(itertools.islice(it,n)) | >>> first(3,iter([1,2,3,4]))
[1, 2, 3]
>>> first(3,iter([1,2,3,4]), iter) #doctest: +ELLIPSIS
<itertools.islice object at ...>
>>> first(3,iter([1,2,3,4]), tuple)
(1, 2, 3) | entailment |
def drop(n, it, constructor=list):
"""
>>> first(10,drop(10,xrange(sys.maxint),iter))
[10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
"""
return constructor(itertools.islice(it,n,None)) | >>> first(10,drop(10,xrange(sys.maxint),iter))
[10, 11, 12, 13, 14, 15, 16, 17, 18, 19] | entailment |
def run(self, func, *args, **kwargs):
"""Same as ``self.dryRun`` if ``self.dry``, else same as ``self.wetRun``."""
if self.dry:
return self.dryRun(func, *args, **kwargs)
else:
return self.wetRun(func, *args, **kwargs) | Same as ``self.dryRun`` if ``self.dry``, else same as ``self.wetRun``. | entailment |
def dryRun(self, func, *args, **kwargs):
"""Instead of running function with `*args` and `**kwargs`, just print
out the function call."""
print >> self.out, \
self.formatterDict.get(func, self.defaultFormatter)(func, *args, **kwargs) | Instead of running function with `*args` and `**kwargs`, just print
out the function call. | entailment |
def iterbridges():
''' Iterate over all the bridges in the system. '''
net_files = os.listdir(SYSFS_NET_PATH)
for d in net_files:
path = os.path.join(SYSFS_NET_PATH, d)
if not os.path.isdir(path):
continue
if os.path.exists(os.path.join(path, b"bridge")):
yield Bridge(d) | Iterate over all the bridges in the system. | entailment |
def addbr(name):
''' Create new bridge with the given name '''
fcntl.ioctl(ifconfig.sockfd, SIOCBRADDBR, name)
return Bridge(name) | Create new bridge with the given name | entailment |
def iterifs(self):
''' Iterate over all the interfaces in this bridge. '''
if_path = os.path.join(SYSFS_NET_PATH, self.name, b"brif")
net_files = os.listdir(if_path)
for iface in net_files:
yield iface | Iterate over all the interfaces in this bridge. | entailment |
def addif(self, iface):
''' Add the interface with the given name to this bridge. Equivalent to
brctl addif [bridge] [interface]. '''
    if isinstance(iface, ifconfig.Interface):
        devindex = iface.index
    else:
        devindex = ifconfig.Interface(iface).index
ifreq = struct.pack('16si', self.name, devindex)
fcntl.ioctl(ifconfig.sockfd, SIOCBRADDIF, ifreq)
return self | Add the interface with the given name to this bridge. Equivalent to
brctl addif [bridge] [interface]. | entailment |
def delif(self, iface):
''' Remove the interface with the given name from this bridge.
Equivalent to brctl delif [bridge] [interface]'''
    if isinstance(iface, ifconfig.Interface):
        devindex = iface.index
    else:
        devindex = ifconfig.Interface(iface).index
ifreq = struct.pack('16si', self.name, devindex)
fcntl.ioctl(ifconfig.sockfd, SIOCBRDELIF, ifreq)
return self | Remove the interface with the given name from this bridge.
Equivalent to brctl delif [bridge] [interface] | entailment |
def delete(self):
''' Brings down the bridge interface, and removes it. Equivalent to
ifconfig [bridge] down && brctl delbr [bridge]. '''
self.down()
fcntl.ioctl(ifconfig.sockfd, SIOCBRDELBR, self.name)
return self | Brings down the bridge interface, and removes it. Equivalent to
ifconfig [bridge] down && brctl delbr [bridge]. | entailment |
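The bridge rows above compose into a typical lifecycle. A minimal sketch assuming the addbr/Bridge API shown above, root privileges, and illustrative interface names:

br = addbr(b'br0')            # brctl addbr br0
br.addif(b'eth0')             # brctl addif br0 eth0
for iface in br.iterifs():    # names of the enslaved interfaces
    print(iface)
br.delete()                   # ifconfig br0 down && brctl delbr br0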
def _get_random_id():
""" Get a random (i.e., unique) string identifier"""
symbols = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.choice(symbols) for _ in range(15)) | Get a random (i.e., unique) string identifier | entailment |
def get_lib_filename(category, name):
""" Get a filename of a built-in library file. """
base_dir = os.path.dirname(os.path.abspath(__file__))
if category == 'js':
filename = os.path.join('js', '{0}.js'.format(name))
elif category == 'css':
filename = os.path.join('css', '{0}.css'.format(name))
elif category == 'html':
filename = os.path.join('html', '{0}.html'.format(name))
else:
raise ValueError("Unknown category")
return os.path.join(base_dir, 'lib', filename) | Get a filename of a built-in library file. | entailment |
def output_notebook(
d3js_url="//d3js.org/d3.v3.min",
requirejs_url="//cdnjs.cloudflare.com/ajax/libs/require.js/2.1.10/require.min.js",
html_template=None
):
""" Import required Javascript libraries to Jupyter Notebook. """
if html_template is None:
html_template = read_lib('html', 'setup')
setup_html = populate_template(
html_template,
d3js=d3js_url,
requirejs=requirejs_url
)
display_html(setup_html)
return | Import required Javascript libraries to Jupyter Notebook. | entailment |
def create_graph_html(js_template, css_template, html_template=None):
""" Create HTML code block given the graph Javascript and CSS. """
if html_template is None:
html_template = read_lib('html', 'graph')
# Create div ID for the graph and give it to the JS and CSS templates so
# they can reference the graph.
graph_id = 'graph-{0}'.format(_get_random_id())
js = populate_template(js_template, graph_id=graph_id)
css = populate_template(css_template, graph_id=graph_id)
return populate_template(
html_template,
graph_id=graph_id,
css=css,
js=js
) | Create HTML code block given the graph Javascript and CSS. | entailment |
def get_newest_possible_languagetool_version():
"""Return newest compatible version.
>>> version = get_newest_possible_languagetool_version()
>>> version in [JAVA_6_COMPATIBLE_VERSION, LATEST_VERSION]
True
"""
java_path = find_executable('java')
if not java_path:
# Just ignore this and assume an old version of Java. It might not be
# found because of a PATHEXT-related issue
# (https://bugs.python.org/issue2200).
return JAVA_6_COMPATIBLE_VERSION
output = subprocess.check_output([java_path, '-version'],
stderr=subprocess.STDOUT,
universal_newlines=True)
# https://www.oracle.com/technetwork/java/javase/versioning-naming-139433.html
match = re.search(
r'^java version "(?P<major1>\d+)\.(?P<major2>\d+)\.[^"]+"$',
output,
re.MULTILINE)
if not match:
raise SystemExit(
'Could not parse Java version from """{}""".'.format(output))
java_version = (int(match.group('major1')), int(match.group('major2')))
if java_version >= (1, 7):
return LATEST_VERSION
elif java_version >= (1, 6):
warn('grammar-check would be able to use a newer version of '
'LanguageTool if you had Java 7 or newer installed')
return JAVA_6_COMPATIBLE_VERSION
else:
raise SystemExit(
'You need at least Java 6 to use grammar-check') | Return newest compatible version.
>>> version = get_newest_possible_languagetool_version()
>>> version in [JAVA_6_COMPATIBLE_VERSION, LATEST_VERSION]
True | entailment |
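The version regex above only recognizes the legacy "1.x" banner format. A quick self-contained check against a typical Java 8 banner (the sample string is illustrative, not captured live):

import re
sample = 'java version "1.8.0_181"\nJava(TM) SE Runtime Environment'
match = re.search(r'^java version "(?P<major1>\d+)\.(?P<major2>\d+)\.[^"]+"$',
                  sample, re.MULTILINE)
assert (int(match.group('major1')), int(match.group('major2'))) == (1, 8)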
def iterifs(physical=True):
''' Iterate over all the interfaces in the system. If physical is
true, then return only real physical interfaces (not 'lo', etc).'''
net_files = os.listdir(SYSFS_NET_PATH)
interfaces = set()
virtual = set()
for d in net_files:
path = os.path.join(SYSFS_NET_PATH, d)
if not os.path.isdir(path):
continue
if not os.path.exists(os.path.join(path, b"device")):
virtual.add(d)
interfaces.add(d)
# Some virtual interfaces don't show up in the above search, for example,
# subinterfaces (e.g. eth0:1). To find those, we have to do an ioctl
if not physical:
# ifconfig gets a max of 30 interfaces. Good enough for us too.
ifreqs = array.array("B", b"\x00" * SIZE_OF_IFREQ * 30)
buf_addr, _buf_len = ifreqs.buffer_info()
ifconf = struct.pack("iP", SIZE_OF_IFREQ * 30, buf_addr)
ifconf_res = fcntl.ioctl(sockfd, SIOCGIFCONF, ifconf)
ifreqs_len, _ = struct.unpack("iP", ifconf_res)
assert ifreqs_len % SIZE_OF_IFREQ == 0, (
"Unexpected amount of data returned from ioctl. "
"You're probably running on an unexpected architecture")
res = ifreqs.tostring()
for i in range(0, ifreqs_len, SIZE_OF_IFREQ):
d = res[i:i+16].strip(b'\0')
interfaces.add(d)
results = interfaces - virtual if physical else interfaces
for d in results:
yield Interface(d) | Iterate over all the interfaces in the system. If physical is
true, then return only real physical interfaces (not 'lo', etc). | entailment |
def init():
''' Initialize the library '''
globals()["sock"] = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
globals()["sockfd"] = globals()["sock"].fileno() | Initialize the library | entailment |
def up(self):
''' Bring up the bridge interface. Equivalent to ifconfig [iface] up. '''
# Get existing device flags
ifreq = struct.pack('16sh', self.name, 0)
flags = struct.unpack('16sh', fcntl.ioctl(sockfd, SIOCGIFFLAGS, ifreq))[1]
# Set new flags
flags = flags | IFF_UP
ifreq = struct.pack('16sh', self.name, flags)
fcntl.ioctl(sockfd, SIOCSIFFLAGS, ifreq) | Bring up the bridge interface. Equivalent to ifconfig [iface] up. | entailment |
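delete() earlier calls self.down(), which does not appear in these rows. A plausible counterpart, mirroring up() but clearing the flag, might look like this (a sketch, not the library's actual code):

def down(self):
    ''' Sketch only: bring the interface down by clearing IFF_UP. '''
    ifreq = struct.pack('16sh', self.name, 0)
    flags = struct.unpack('16sh', fcntl.ioctl(sockfd, SIOCGIFFLAGS, ifreq))[1]
    flags = flags & ~IFF_UP  # drop the "interface up" bit
    ifreq = struct.pack('16sh', self.name, flags)
    fcntl.ioctl(sockfd, SIOCSIFFLAGS, ifreq)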
def is_up(self):
''' Return True if the interface is up, False otherwise. '''
# Get existing device flags
ifreq = struct.pack('16sh', self.name, 0)
flags = struct.unpack('16sh', fcntl.ioctl(sockfd, SIOCGIFFLAGS, ifreq))[1]
    # Check whether the IFF_UP flag is set
if flags & IFF_UP:
return True
else:
return False | Return True if the interface is up, False otherwise. | entailment |
def get_mac(self):
''' Obtain the device's mac address. '''
ifreq = struct.pack('16sH14s', self.name, AF_UNIX, b'\x00'*14)
res = fcntl.ioctl(sockfd, SIOCGIFHWADDR, ifreq)
address = struct.unpack('16sH14s', res)[2]
mac = struct.unpack('6B8x', address)
return ":".join(['%02X' % i for i in mac]) | Obtain the device's mac address. | entailment |
def set_mac(self, newmac):
''' Set the device's mac address. Device must be down for this to
succeed. '''
macbytes = [int(i, 16) for i in newmac.split(':')]
ifreq = struct.pack('16sH6B8x', self.name, AF_UNIX, *macbytes)
fcntl.ioctl(sockfd, SIOCSIFHWADDR, ifreq) | Set the device's mac address. Device must be down for this to
succeed. | entailment |
def get_index(self):
''' Convert an interface name to an index value. '''
ifreq = struct.pack('16si', self.name, 0)
res = fcntl.ioctl(sockfd, SIOCGIFINDEX, ifreq)
return struct.unpack("16si", res)[1] | Convert an interface name to an index value. | entailment |
def set_pause_param(self, autoneg, rx_pause, tx_pause):
"""
Ethernet has flow control! The inter-frame pause can be adjusted, by
auto-negotiation through an ethernet frame type with a simple two-field
payload, and by setting it explicitly.
http://en.wikipedia.org/wiki/Ethernet_flow_control
"""
# create a struct ethtool_pauseparm
# create a struct ifreq with its .ifr_data pointing at the above
ecmd = array.array('B', struct.pack('IIII',
ETHTOOL_SPAUSEPARAM, bool(autoneg), bool(rx_pause), bool(tx_pause)))
buf_addr, _buf_len = ecmd.buffer_info()
ifreq = struct.pack('16sP', self.name, buf_addr)
fcntl.ioctl(sockfd, SIOCETHTOOL, ifreq) | Ethernet has flow control! The inter-frame pause can be adjusted, by
auto-negotiation through an ethernet frame type with a simple two-field
payload, and by setting it explicitly.
http://en.wikipedia.org/wiki/Ethernet_flow_control | entailment |
def get_version():
"""Return version string."""
with io.open('grammar_check/__init__.py', encoding='utf-8') as input_file:
for line in input_file:
if line.startswith('__version__'):
return ast.parse(line).body[0].value.s | Return version string. | entailment |
def split_elements(value):
"""Split a string with comma or space-separated elements into a list."""
    elements = [v.strip() for v in value.split(',')]
    if len(elements) == 1:
        elements = value.split()
    return elements | Split a string with comma or space-separated elements into a list. | entailment |
def generate_py2k(config, py2k_dir=PY2K_DIR, run_tests=False):
"""Generate Python 2 code from Python 3 code."""
def copy(src, dst):
if (not os.path.isfile(dst) or
os.path.getmtime(src) > os.path.getmtime(dst)):
shutil.copy(src, dst)
return dst
return None
def copy_data(src, dst):
if (not os.path.isfile(dst) or
os.path.getmtime(src) > os.path.getmtime(dst) or
os.path.getsize(src) != os.path.getsize(dst)):
shutil.copy(src, dst)
return dst
return None
copied_py_files = []
test_scripts = []
if not os.path.isdir(py2k_dir):
os.makedirs(py2k_dir)
packages_root = get_cfg_value(config, 'files', 'packages_root')
for name in get_cfg_value(config, 'files', 'packages'):
name = name.replace('.', os.path.sep)
py3k_path = os.path.join(packages_root, name)
py2k_path = os.path.join(py2k_dir, py3k_path)
if not os.path.isdir(py2k_path):
os.makedirs(py2k_path)
for fn in os.listdir(py3k_path):
path = os.path.join(py3k_path, fn)
if not os.path.isfile(path):
continue
            if os.path.splitext(path)[1].lower() != '.py':
continue
new_path = os.path.join(py2k_path, fn)
if copy(path, new_path):
copied_py_files.append(new_path)
for name in get_cfg_value(config, 'files', 'modules'):
name = name.replace('.', os.path.sep) + '.py'
py3k_path = os.path.join(packages_root, name)
py2k_path = os.path.join(py2k_dir, py3k_path)
dirname = os.path.dirname(py2k_path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
if copy(py3k_path, py2k_path):
copied_py_files.append(py2k_path)
for name in get_cfg_value(config, 'files', 'scripts'):
py3k_path = os.path.join(packages_root, name)
py2k_path = os.path.join(py2k_dir, py3k_path)
dirname = os.path.dirname(py2k_path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
if copy(py3k_path, py2k_path):
copied_py_files.append(py2k_path)
setup_py_path = os.path.abspath(__file__)
for pattern in get_cfg_value(config, 'files', 'extra_files'):
for path in glob.glob(pattern):
if os.path.abspath(path) == setup_py_path:
continue
py2k_path = os.path.join(py2k_dir, path)
py2k_dirname = os.path.dirname(py2k_path)
if not os.path.isdir(py2k_dirname):
os.makedirs(py2k_dirname)
filename = os.path.split(path)[1]
ext = os.path.splitext(filename)[1].lower()
if ext == '.py':
if copy(path, py2k_path):
copied_py_files.append(py2k_path)
else:
copy_data(path, py2k_path)
if (os.access(py2k_path, os.X_OK) and
re.search(r"\btest\b|_test\b|\btest_", filename)):
test_scripts.append(py2k_path)
for package, patterns in get_package_data(
get_cfg_value(config, 'files', 'package_data')).items():
for pattern in patterns:
py3k_pattern = os.path.join(packages_root, package, pattern)
for py3k_path in glob.glob(py3k_pattern):
py2k_path = os.path.join(py2k_dir, py3k_path)
py2k_dirname = os.path.dirname(py2k_path)
if not os.path.isdir(py2k_dirname):
os.makedirs(py2k_dirname)
copy_data(py3k_path, py2k_path)
if copied_py_files:
copied_py_files.sort()
try:
run_3to2(copied_py_files)
write_py2k_header(copied_py_files)
except:
shutil.rmtree(py2k_dir)
raise
if run_tests:
for script in test_scripts:
subprocess.check_call([script]) | Generate Python 2 code from Python 3 code. | entailment |
def default_hook(config):
"""Default setup hook."""
if (any(arg.startswith('bdist') for arg in sys.argv) and
os.path.isdir(PY2K_DIR) != IS_PY2K and os.path.isdir(LIB_DIR)):
shutil.rmtree(LIB_DIR)
if IS_PY2K and any(arg.startswith('install') or
arg.startswith('build') or
arg.startswith('bdist') for arg in sys.argv):
generate_py2k(config)
packages_root = get_cfg_value(config, 'files', 'packages_root')
packages_root = os.path.join(PY2K_DIR, packages_root)
set_cfg_value(config, 'files', 'packages_root', packages_root) | Default setup hook. | entailment |
def get_default_if():
    """ Returns the default interface """
    interf = None
    with open('/proc/net/route', 'r') as f:
        for line in f:
            words = line.split()
            dest = words[1]
            try:
                if int(dest) == 0:
                    interf = words[0]
                    break
            except ValueError:
                pass
    return interf | Returns the default interface | entailment |
def get_default_gw():
""" Returns the default gateway """
octet_list = []
gw_from_route = None
    with open('/proc/net/route', 'r') as f:
        for line in f:
            words = line.split()
            dest = words[1]
            try:
                if int(dest) == 0:
                    gw_from_route = words[2]
                    break
            except ValueError:
                pass
if not gw_from_route:
return None
for i in range(8, 1, -2):
octet = gw_from_route[i-2:i]
octet = int(octet, 16)
octet_list.append(str(octet))
gw_ip = ".".join(octet_list)
return gw_ip | Returns the default gateway | entailment |
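A worked example of the little-endian hex decoding in get_default_gw (the route entry value is illustrative):

# /proc/net/route stores the gateway 192.168.1.254 as 'FE01A8C0'
gw_from_route = 'FE01A8C0'
octets = [str(int(gw_from_route[i - 2:i], 16)) for i in range(8, 1, -2)]
assert '.'.join(octets) == '192.168.1.254'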
def init(init_type='plaintext_tcp', *args, **kwargs):
"""
Create the module instance of the GraphiteClient.
"""
global _module_instance
reset()
validate_init_types = ['plaintext_tcp', 'plaintext', 'pickle_tcp',
'pickle', 'plain']
if init_type not in validate_init_types:
raise GraphiteSendException(
"Invalid init_type '%s', must be one of: %s" %
(init_type, ", ".join(validate_init_types)))
# Use TCP to send data to the plain text receiver on the graphite server.
if init_type in ['plaintext_tcp', 'plaintext', 'plain']:
_module_instance = GraphiteClient(*args, **kwargs)
# Use TCP to send pickled data to the pickle receiver on the graphite
# server.
if init_type in ['pickle_tcp', 'pickle']:
_module_instance = GraphitePickleClient(*args, **kwargs)
return _module_instance | Create the module instance of the GraphiteClient. | entailment |
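A minimal usage sketch; the server address keyword and metric name are assumptions made for illustration, not taken from these rows:

g = init('plaintext_tcp', graphite_server='graphite.example.com')  # hypothetical host
g.send('webapp.response_time', 42)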
def send(*args, **kwargs):
""" Make sure that we have an instance of the GraphiteClient.
Then send the metrics to the graphite server.
User consumable method.
"""
if not _module_instance:
raise GraphiteSendException(
"Must call graphitesend.init() before sending")
_module_instance.send(*args, **kwargs)
return _module_instance | Make sure that we have an instance of the GraphiteClient.
Then send the metrics to the graphite server.
User consumable method. | entailment |
def send_dict(*args, **kwargs):
""" Make sure that we have an instance of the GraphiteClient.
Then send the metrics to the graphite server.
User consumable method.
"""
if not _module_instance:
raise GraphiteSendException(
"Must call graphitesend.init() before sending")
_module_instance.send_dict(*args, **kwargs)
return _module_instance | Make sure that we have an instance of the GraphiteClient.
Then send the metrics to the graphite server.
User consumable method. | entailment |
def send_list(*args, **kwargs):
""" Make sure that we have an instance of the GraphiteClient.
Then send the metrics to the graphite server.
User consumable method.
"""
if not _module_instance:
raise GraphiteSendException(
"Must call graphitesend.init() before sending")
_module_instance.send_list(*args, **kwargs)
return _module_instance | Make sure that we have an instance of the GraphiteClient.
Then send the metrics to the graphite server.
User consumable method. | entailment |
def cli():
""" Allow the module to be called from the cli. """
import argparse
parser = argparse.ArgumentParser(description='Send data to graphite')
# Core of the application is to accept a metric and a value.
parser.add_argument('metric', metavar='metric', type=str,
help='name.of.metric')
parser.add_argument('value', metavar='value', type=int,
help='value of metric as int')
args = parser.parse_args()
metric = args.metric
value = args.value
graphitesend_instance = init()
graphitesend_instance.send(metric, value) | Allow the module to be called from the cli. | entailment |
def connect(self):
"""
Make a TCP connection to the graphite server on port self.port
"""
self.socket = socket.socket()
self.socket.settimeout(self.timeout_in_seconds)
try:
self.socket.connect(self.addr)
except socket.timeout:
raise GraphiteSendException(
"Took over %d second(s) to connect to %s" %
(self.timeout_in_seconds, self.addr))
except socket.gaierror:
raise GraphiteSendException(
"No address associated with hostname %s:%s" % self.addr)
except Exception as error:
raise GraphiteSendException(
"unknown exception while connecting to %s - %s" %
(self.addr, error)
)
return self.socket | Make a TCP connection to the graphite server on port self.port | entailment |
def autoreconnect(self, sleep=1, attempt=3, exponential=True, jitter=5):
"""
Tries to reconnect with some delay:
exponential=False: up to `attempt` times with `sleep` seconds between
each try
    exponential=True: up to `attempt` times with exponentially growing `sleep`
and random delay in range 1..`jitter` (exponential backoff)
:param sleep: time to sleep between two attempts to reconnect
:type sleep: float or int
:param attempt: maximal number of attempts
:type attempt: int
:param exponential: if set - use exponential backoff logic
:type exponential: bool
:param jitter: top value of random delay, sec
:type jitter: int
"""
p = 0
while attempt is None or attempt > 0:
try:
self.reconnect()
return True
except GraphiteSendException:
if exponential:
p += 1
time.sleep(pow(sleep, p) + random.randint(1, jitter))
else:
time.sleep(sleep)
attempt -= 1
return False | Tries to reconnect with some delay:
exponential=False: up to `attempt` times with `sleep` seconds between
each try
exponential=True: up to `attempt` times with exponentially growing `sleep`
and random delay in range 1..`jitter` (exponential backoff)
:param sleep: time to sleep between two attempts to reconnect
:type sleep: float or int
:param attempt: maximal number of attempts
:type attempt: int
:param exponential: if set - use exponential backoff logic
:type exponential: bool
:param jitter: top value of random delay, sec
:type jitter: int | entailment |
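To make the exponential branch concrete: each failed attempt sleeps pow(sleep, p) plus a random jitter. An illustrative run with sleep=2, jitter=5 (mirrors the loop above):

import random
for p in (1, 2, 3):
    delay = pow(2, p) + random.randint(1, 5)  # roughly 3-7s, then 5-9s, then 9-13s
    print('attempt %d would sleep %ds' % (p, delay))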
def disconnect(self):
"""
Close the TCP connection with the graphite server.
"""
try:
self.socket.shutdown(1)
        # If it's currently a socket, set it to None
except AttributeError:
self.socket = None
except Exception:
self.socket = None
# Set the self.socket to None, no matter what.
finally:
self.socket = None | Close the TCP connection with the graphite server. | entailment |
def _dispatch_send(self, message):
"""
Dispatch the different steps of sending
"""
if self.dryrun:
return message
if not self.socket:
raise GraphiteSendException(
"Socket was not created before send"
)
sending_function = self._send
if self._autoreconnect:
sending_function = self._send_and_reconnect
try:
if self.asynchronous and gevent:
gevent.spawn(sending_function, message)
else:
sending_function(message)
except Exception as e:
self._handle_send_error(e)
return "sent {0} long message: {1}".format(len(message), message[:75]) | Dispatch the different steps of sending | entailment |
def _send_and_reconnect(self, message):
"""Send _message_ to Graphite Server and attempt reconnect on failure.
If _autoreconnect_ was specified, attempt to reconnect if first send
fails.
:raises AttributeError: When the socket has not been set.
:raises socket.error: When the socket connection is no longer valid.
"""
try:
self.socket.sendall(message.encode("ascii"))
except (AttributeError, socket.error):
if not self.autoreconnect():
raise
else:
self.socket.sendall(message.encode("ascii")) | Send _message_ to Graphite Server and attempt reconnect on failure.
If _autoreconnect_ was specified, attempt to reconnect if first send
fails.
:raises AttributeError: When the socket has not been set.
:raises socket.error: When the socket connection is no longer valid. | entailment |
def send(self, metric, value, timestamp=None, formatter=None):
"""
Format a single metric/value pair, and send it to the graphite
server.
    :param metric: name of the metric
    :type metric: string
    :param value: value of the metric
    :type value: float or int
    :param timestamp: epoch time of the event
    :type timestamp: float or int
    :param formatter: optional non-default formatter
    :type formatter: callable
.. code-block:: python
>>> g = init()
>>> g.send("metric", 54)
.. code-block:: python
>>> g = init()
>>> g.send(metric="metricname", value=73)
"""
if formatter is None:
formatter = self.formatter
message = formatter(metric, value, timestamp)
        message = self._presend(message)
return self._dispatch_send(message) | Format a single metric/value pair, and send it to the graphite
server.
:param metric: name of the metric
:type metric: string
:param value: value of the metric
:type value: float or int
:param timestamp: epoch time of the event
:type timestamp: float or int
:param formatter: optional non-default formatter
:type formatter: callable
.. code-block:: python
>>> g = init()
>>> g.send("metric", 54)
.. code-block:: python
>>> g = init()
>>> g.send(metric="metricname", value=73) | entailment |
def send_dict(self, data, timestamp=None, formatter=None):
"""
Format a dict of metric/values pairs, and send them all to the
graphite server.
    :param data: key,value pair of metric name and metric value
    :type data: dict
    :param timestamp: epoch time of the event
    :type timestamp: float or int
    :param formatter: optional non-default formatter
    :type formatter: callable
.. code-block:: python
>>> g = init()
>>> g.send_dict({'metric1': 54, 'metric2': 43, 'metricN': 999})
"""
if formatter is None:
formatter = self.formatter
metric_list = []
for metric, value in data.items():
tmp_message = formatter(metric, value, timestamp)
metric_list.append(tmp_message)
message = "".join(metric_list)
return self._dispatch_send(message) | Format a dict of metric/values pairs, and send them all to the
graphite server.
:param data: key,value pair of metric name and metric value
:type data: dict
:param timestamp: epoch time of the event
:type timestamp: float or int
:param formatter: optional non-default formatter
:type formatter: callable
.. code-block:: python
>>> g = init()
>>> g.send_dict({'metric1': 54, 'metric2': 43, 'metricN': 999}) | entailment |
def enable_asynchronous(self):
"""Check if socket have been monkey patched by gevent"""
def is_monkey_patched():
try:
from gevent import monkey, socket
except ImportError:
return False
if hasattr(monkey, "saved"):
return "socket" in monkey.saved
return gevent.socket.socket == socket.socket
if not is_monkey_patched():
raise Exception("To activate asynchonoucity, please monkey patch"
" the socket module with gevent")
    return True | Check if the socket has been monkey patched by gevent | entailment |
def str2listtuple(self, string_message):
"Covert a string that is ready to be sent to graphite into a tuple"
if type(string_message).__name__ not in ('str', 'unicode'):
raise TypeError("Must provide a string or unicode")
if not string_message.endswith('\n'):
string_message += "\n"
tpl_list = []
for line in string_message.split('\n'):
line = line.strip()
if not line:
continue
path, metric, timestamp = (None, None, None)
try:
(path, metric, timestamp) = line.split()
except ValueError:
raise ValueError(
"message must contain - metric_name, value and timestamp '%s'"
% line)
try:
timestamp = float(timestamp)
except ValueError:
raise ValueError("Timestamp must be float or int")
tpl_list.append((path, (timestamp, metric)))
if len(tpl_list) == 0:
raise GraphiteSendException("No messages to send")
payload = pickle.dumps(tpl_list)
header = struct.pack("!L", len(payload))
message = header + payload
    return message | Convert a string that is ready to be sent to graphite into a tuple | entailment |
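What the pickle receiver ultimately gets from str2listtuple is a length-prefixed pickle. An illustrative reconstruction with made-up metric values:

import pickle
import struct
payload = pickle.dumps([('servers.web1.cpu', (1400000000.0, '42'))])
message = struct.pack('!L', len(payload)) + payload
# 4-byte big-endian length header followed by the pickled [(path, (timestamp, value))] list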
def _send(self, message):
""" Given a message send it to the graphite server. """
# An option to lowercase the entire message
if self.lowercase_metric_names:
message = message.lower()
# convert the message into a pickled payload.
message = self.str2listtuple(message)
try:
self.socket.sendall(message)
# Capture missing socket.
except socket.gaierror as error:
raise GraphiteSendException(
"Failed to send data to %s, with error: %s" %
(self.addr, error)) # noqa
# Capture socket closure before send.
except socket.error as error:
raise GraphiteSendException(
"Socket closed before able to send data to %s, "
"with error: %s" %
(self.addr, error)) # noqa
except Exception as error:
raise GraphiteSendException(
"Unknown error while trying to send data down socket to %s, "
"error: %s" %
(self.addr, error)) # noqa
return "sent %d long pickled message" % len(message) | Given a message send it to the graphite server. | entailment |
def clean_metric_name(self, metric_name):
"""
Make sure the metric is free of control chars, spaces, tabs, etc.
"""
if not self._clean_metric_name:
return metric_name
metric_name = str(metric_name)
for _from, _to in self.cleaning_replacement_list:
metric_name = metric_name.replace(_from, _to)
return metric_name | Make sure the metric is free of control chars, spaces, tabs, etc. | entailment |
def _get_languages(cls) -> set:
"""Get supported languages (by querying the server)."""
if not cls._server_is_alive():
cls._start_server_on_free_port()
url = urllib.parse.urljoin(cls._url, 'Languages')
languages = set()
for e in cls._get_root(url, num_tries=1):
languages.add(e.get('abbr'))
languages.add(e.get('abbrWithVariant'))
return languages | Get supported languages (by querying the server). | entailment |
def _get_attrib(cls):
"""Get matches element attributes."""
if not cls._server_is_alive():
cls._start_server_on_free_port()
params = {'language': FAILSAFE_LANGUAGE, 'text': ''}
data = urllib.parse.urlencode(params).encode()
root = cls._get_root(cls._url, data, num_tries=1)
return root.attrib | Get matches element attributes. | entailment |
def get_form(self, request, obj=None, **kwargs):
"""
Patched method for PageAdmin.get_form.
Returns a page form without the base field 'meta_description' which is
overridden in djangocms-page-meta.
This is triggered in the page add view and in the change view if
the meta description of the page is empty.
"""
language = get_language_from_request(request, obj)
form = _BASE_PAGEADMIN__GET_FORM(self, request, obj, **kwargs)
if not obj or not obj.get_meta_description(language=language):
form.base_fields.pop('meta_description', None)
return form | Patched method for PageAdmin.get_form.
Returns a page form without the base field 'meta_description' which is
overridden in djangocms-page-meta.
This is triggered in the page add view and in the change view if
the meta description of the page is empty. | entailment |
def get_cache_key(page, language):
"""
Create the cache key for the current page and language
"""
from cms.cache import _get_cache_key
try:
site_id = page.node.site_id
except AttributeError: # CMS_3_4
site_id = page.site_id
return _get_cache_key('page_meta', page, language, site_id) | Create the cache key for the current page and language | entailment |
def get_page_meta(page, language):
"""
Retrieves all the meta information for the page in the given language
:param page: a Page instance
:param lang: a language code
:return: Meta instance
:type: object
"""
from django.core.cache import cache
from meta.views import Meta
from .models import PageMeta, TitleMeta
try:
meta_key = get_cache_key(page, language)
except AttributeError:
return None
gplus_server = 'https://plus.google.com'
meta = cache.get(meta_key)
if not meta:
meta = Meta()
title = page.get_title_obj(language)
meta.extra_custom_props = []
meta.title = page.get_page_title(language)
if not meta.title:
meta.title = page.get_title(language)
if title.meta_description:
meta.description = title.meta_description.strip()
try:
titlemeta = title.titlemeta
if titlemeta.description:
meta.description = titlemeta.description.strip()
if titlemeta.keywords:
meta.keywords = titlemeta.keywords.strip().split(',')
meta.locale = titlemeta.locale
meta.og_description = titlemeta.og_description.strip()
if not meta.og_description:
meta.og_description = meta.description
meta.twitter_description = titlemeta.twitter_description.strip()
if not meta.twitter_description:
meta.twitter_description = meta.description
meta.gplus_description = titlemeta.gplus_description.strip()
if not meta.gplus_description:
meta.gplus_description = meta.description
if titlemeta.image:
                meta.image = titlemeta.image.canonical_url or titlemeta.image.url
for item in titlemeta.extra.all():
attribute = item.attribute
if not attribute:
attribute = item.DEFAULT_ATTRIBUTE
meta.extra_custom_props.append((attribute, item.name, item.value))
except (TitleMeta.DoesNotExist, AttributeError):
# Skipping title-level metas
if meta.description:
meta.og_description = meta.description
meta.twitter_description = meta.description
meta.gplus_description = meta.description
defaults = {
'object_type': meta_settings.FB_TYPE,
'og_type': meta_settings.FB_TYPE,
'og_app_id': meta_settings.FB_APPID,
'fb_pages': meta_settings.FB_PAGES,
'og_profile_id': meta_settings.FB_PROFILE_ID,
'og_publisher': meta_settings.FB_PUBLISHER,
'og_author_url': meta_settings.FB_AUTHOR_URL,
'twitter_type': meta_settings.TWITTER_TYPE,
'twitter_site': meta_settings.TWITTER_SITE,
'twitter_author': meta_settings.TWITTER_AUTHOR,
'gplus_type': meta_settings.GPLUS_TYPE,
'gplus_author': meta_settings.GPLUS_AUTHOR,
}
try:
pagemeta = page.pagemeta
meta.object_type = pagemeta.og_type
meta.og_type = pagemeta.og_type
meta.og_app_id = pagemeta.og_app_id
meta.fb_pages = pagemeta.fb_pages
meta.og_profile_id = pagemeta.og_author_fbid
meta.twitter_type = pagemeta.twitter_type
meta.twitter_site = pagemeta.twitter_site
meta.twitter_author = pagemeta.twitter_author
meta.gplus_type = pagemeta.gplus_type
meta.gplus_author = pagemeta.gplus_author
if meta.og_type == 'article':
meta.og_publisher = pagemeta.og_publisher
meta.og_author_url = pagemeta.og_author_url
try:
from djangocms_page_tags.utils import get_title_tags, get_page_tags
tags = list(get_title_tags(page, language))
tags += list(get_page_tags(page))
meta.tag = ','.join([tag.name for tag in tags])
except ImportError:
# djangocms-page-tags not available
pass
if not meta.image and pagemeta.image:
meta.image = pagemeta.image.canonical_url or pagemeta.image.url
for item in pagemeta.extra.all():
attribute = item.attribute
if not attribute:
attribute = item.DEFAULT_ATTRIBUTE
meta.extra_custom_props.append((attribute, item.name, item.value))
except PageMeta.DoesNotExist:
pass
if meta.gplus_author and not meta.gplus_author.startswith('http'):
if not meta.gplus_author.startswith('/'):
meta.gplus_author = '{0}/{1}'.format(gplus_server, meta.gplus_author)
else:
meta.gplus_author = '{0}{1}'.format(gplus_server, meta.gplus_author)
if page.publication_date:
meta.published_time = page.publication_date.isoformat()
if page.changed_date:
meta.modified_time = page.changed_date.isoformat()
if page.publication_end_date:
meta.expiration_time = page.publication_end_date.isoformat()
for attr, val in defaults.items():
if not getattr(meta, attr, '') and val:
setattr(meta, attr, val)
meta.url = page.get_absolute_url(language)
return meta | Retrieves all the meta information for the page in the given language
:param page: a Page instance
:param lang: a language code
:return: Meta instance
:type: object | entailment |
def begin(self, address=MPR121_I2CADDR_DEFAULT, i2c=None, **kwargs):
"""Initialize communication with the MPR121.
Can specify a custom I2C address for the device using the address
parameter (defaults to 0x5A). Optional i2c parameter allows specifying a
custom I2C bus source (defaults to platform's I2C bus).
Returns True if communication with the MPR121 was established, otherwise
returns False.
"""
# Assume we're using platform's default I2C bus if none is specified.
if i2c is None:
import Adafruit_GPIO.I2C as I2C
i2c = I2C
        # Require repeated start conditions for I2C register reads. Unfortunately
        # the MPR121 is very sensitive and requires repeated starts to read all
        # the registers.
        i2c.require_repeated_start()
# Save a reference to the I2C device instance for later communication.
self._device = i2c.get_i2c_device(address, **kwargs)
return self._reset() | Initialize communication with the MPR121.
Can specify a custom I2C address for the device using the address
parameter (defaults to 0x5A). Optional i2c parameter allows specifying a
custom I2C bus source (defaults to platform's I2C bus).
Returns True if communication with the MPR121 was established, otherwise
returns False. | entailment |
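A hypothetical polling loop built on begin()/touched(); the MPR121 constructor name and the pin handling are assumptions for illustration:

import time
cap = MPR121()  # assumed constructor for the class these methods belong to
if not cap.begin():
    raise RuntimeError('Failed to initialize MPR121')
while True:
    bits = cap.touched()  # 12-bit touch mask, one bit per pin
    for pin in range(12):
        if bits & (1 << pin):
            print('pin %d touched' % pin)
    time.sleep(0.1)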
def set_thresholds(self, touch, release):
"""Set the touch and release threshold for all inputs to the provided
values. Both touch and release should be a value between 0 to 255
(inclusive).
"""
assert touch >= 0 and touch <= 255, 'touch must be between 0-255 (inclusive)'
assert release >= 0 and release <= 255, 'release must be between 0-255 (inclusive)'
# Set the touch and release register value for all the inputs.
for i in range(12):
self._i2c_retry(self._device.write8, MPR121_TOUCHTH_0 + 2*i, touch)
self._i2c_retry(self._device.write8, MPR121_RELEASETH_0 + 2*i, release) | Set the touch and release threshold for all inputs to the provided
values. Both touch and release should be a value between 0 to 255
(inclusive). | entailment |
def filtered_data(self, pin):
"""Return filtered data register value for the provided pin (0-11).
Useful for debugging.
"""
assert pin >= 0 and pin < 12, 'pin must be between 0-11 (inclusive)'
return self._i2c_retry(self._device.readU16LE, MPR121_FILTDATA_0L + pin*2) | Return filtered data register value for the provided pin (0-11).
Useful for debugging. | entailment |
def baseline_data(self, pin):
"""Return baseline data register value for the provided pin (0-11).
Useful for debugging.
"""
assert pin >= 0 and pin < 12, 'pin must be between 0-11 (inclusive)'
bl = self._i2c_retry(self._device.readU8, MPR121_BASELINE_0 + pin)
return bl << 2 | Return baseline data register value for the provided pin (0-11).
Useful for debugging. | entailment |
def touched(self):
"""Return touch state of all pins as a 12-bit value where each bit
represents a pin, with a value of 1 being touched and 0 not being touched.
"""
t = self._i2c_retry(self._device.readU16LE, MPR121_TOUCHSTATUS_L)
return t & 0x0FFF | Return touch state of all pins as a 12-bit value where each bit
represents a pin, with a value of 1 being touched and 0 not being touched. | entailment |
def is_touched(self, pin):
"""Return True if the specified pin is being touched, otherwise returns
False.
"""
assert pin >= 0 and pin < 12, 'pin must be between 0-11 (inclusive)'
t = self.touched()
return (t & (1 << pin)) > 0 | Return True if the specified pin is being touched, otherwise returns
False. | entailment |
def profileit(func):
"""
Decorator straight up stolen from stackoverflow
"""
def wrapper(*args, **kwargs):
datafn = func.__name__ + ".profile" # Name the data file sensibly
prof = cProfile.Profile()
prof.enable()
retval = prof.runcall(func, *args, **kwargs)
prof.disable()
        stats = pstats.Stats(prof)
        stats.dump_stats(datafn)  # write the profile data to the named file
        try:
            stats.sort_stats('cumtime').print_stats()
        except KeyError:
            pass  # 'cumtime' sort key is missing in Python 2.6
return retval
return wrapper | Decorator straight up stolen from stackoverflow | entailment |
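Usage of the profiling decorator above; the profiled function is illustrative:

@profileit
def busy():
    return sum(i * i for i in range(100000))

busy()  # prints cumulative-time stats once the call returns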
def filter_ip_range(ip_range):
"""Filter :class:`.Line` objects by IP range.
Both *192.168.1.203* and *192.168.1.10* are valid if the provided ip
range is ``192.168.1`` whereas *192.168.2.103* is not valid (note the
*.2.*).
:param ip_range: IP range that you want to filter to.
:type ip_range: string
:returns: a function that filters by the provided IP range.
:rtype: function
"""
def filter_func(log_line):
ip = log_line.get_ip()
if ip:
return ip.startswith(ip_range)
return filter_func | Filter :class:`.Line` objects by IP range.
Both *192.168.1.203* and *192.168.1.10* are valid if the provided ip
range is ``192.168.1`` whereas *192.168.2.103* is not valid (note the
*.2.*).
:param ip_range: IP range that you want to filter to.
:type ip_range: string
:returns: a function that filters by the provided IP range.
:rtype: function | entailment |
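A hedged usage sketch, assuming `log` is an instance of the Log class that appears further down in these rows:

office = log.filter(filter_ip_range('192.168.1'))  # 'log' and the range are illustrative
print(office.cmd_status_codes_counter())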
def filter_slow_requests(slowness):
"""Filter :class:`.Line` objects by their response time.
:param slowness: minimum time, in milliseconds, a server needs to answer
a request. If the server takes more time than that the log line is
accepted.
:type slowness: string
:returns: a function that filters by the server response time.
:rtype: function
"""
def filter_func(log_line):
slowness_int = int(slowness)
return slowness_int <= log_line.time_wait_response
return filter_func | Filter :class:`.Line` objects by their response time.
:param slowness: minimum time, in milliseconds, a server needs to answer
a request. If the server takes more time than that the log line is
accepted.
:type slowness: string
:returns: a function that filters by the server response time.
:rtype: function | entailment |
def filter_wait_on_queues(max_waiting):
"""Filter :class:`.Line` objects by their queueing time in
HAProxy.
:param max_waiting: maximum time, in milliseconds, a request is waiting on
HAProxy prior to be delivered to a backend server. If HAProxy takes less
than that time the log line is counted.
:type max_waiting: string
:returns: a function that filters by HAProxy queueing time.
:rtype: function
"""
def filter_func(log_line):
waiting = int(max_waiting)
return waiting >= log_line.time_wait_queues
return filter_func | Filter :class:`.Line` objects by their queueing time in
HAProxy.
:param max_waiting: maximum time, in milliseconds, a request is waiting on
HAProxy prior to be delivered to a backend server. If HAProxy takes less
than that time the log line is counted.
:type max_waiting: string
:returns: a function that filters by HAProxy queueing time.
:rtype: function | entailment |
def filter_time_frame(start, delta):
"""Filter :class:`.Line` objects by their connection time.
:param start: a time expression (see -s argument on --help for its format)
to filter log lines that are before this time.
:type start: string
:param delta: a relative time expression (see -s argument on --help for
its format) to limit the amount of time log lines will be considered.
:type delta: string
:returns: a function that filters by the time a request is made.
:rtype: function
"""
start_value = start
delta_value = delta
end_value = None
    if start_value != '':
        start_value = _date_str_to_datetime(start_value)
    if delta_value != '':
        delta_value = _delta_str_to_timedelta(delta_value)
    if start_value != '' and delta_value != '':
        end_value = start_value + delta_value
    def filter_func(log_line):
        if start_value == '':
return True
elif start_value > log_line.accept_date:
return False
if end_value is None:
return True
elif end_value < log_line.accept_date:
return False
return True
return filter_func | Filter :class:`.Line` objects by their connection time.
:param start: a time expression (see -s argument on --help for its format)
to filter log lines that are before this time.
:type start: string
:param delta: a relative time expression (see -s argument on --help for
its format) to limit the amount of time log lines will be considered.
:type delta: string
:returns: a function that filters by the time a request is made.
:rtype: function | entailment |
def filter_response_size(size):
"""Filter :class:`.Line` objects by the response size (in bytes).
    Especially useful when looking for big file downloads.
    :param size: Minimum number of bytes the response body weighs.
:type size: string
:returns: a function that filters by the response size.
:rtype: function
"""
if size.startswith('+'):
size_value = int(size[1:])
else:
size_value = int(size)
def filter_func(log_line):
bytes_read = log_line.bytes_read
if bytes_read.startswith('+'):
bytes_read = int(bytes_read[1:])
else:
bytes_read = int(bytes_read)
return bytes_read >= size_value
return filter_func | Filter :class:`.Line` objects by the response size (in bytes).
Especially useful when looking for big file downloads.
:param size: Minimum number of bytes the response body weighs.
:type size: string
:returns: a function that filters by the response size.
:rtype: function | entailment |
def _get_fields_for_model(model):
"""
Gets all of the fields on the model.
:param DeclarativeModel model: A SQLAlchemy ORM Model
:return: A tuple of the fields on the Model corresponding
to the columns on the Model.
:rtype: tuple
"""
fields = []
for name in model._sa_class_manager:
prop = getattr(model, name)
if isinstance(prop.property, RelationshipProperty):
for pk in prop.property.mapper.primary_key:
fields.append('{0}.{1}'.format(name, pk.name))
else:
fields.append(name)
return tuple(fields) | Gets all of the fields on the model.
:param DeclarativeModel model: A SQLAlchemy ORM Model
:return: A tuple of the fields on the Model corresponding
to the columns on the Model.
:rtype: tuple | entailment |
def _get_relationships(model):
"""
Gets the necessary relationships for the resource
by inspecting the sqlalchemy model for relationships.
:param DeclarativeMeta model: The SQLAlchemy ORM model.
:return: A tuple of Relationship/ListRelationship instances
corresponding to the relationships on the Model.
:rtype: tuple
"""
relationships = []
for name, relationship in inspect(model).relationships.items():
class_ = relationship.mapper.class_
if relationship.uselist:
rel = ListRelationship(name, relation=class_.__name__)
else:
rel = Relationship(name, relation=class_.__name__)
relationships.append(rel)
return tuple(relationships) | Gets the necessary relationships for the resource
by inspecting the sqlalchemy model for relationships.
:param DeclarativeMeta model: The SQLAlchemy ORM model.
:return: A tuple of Relationship/ListRelationship instances
corresponding to the relationships on the Model.
:rtype: tuple | entailment |
def create_resource(model, session_handler, resource_bases=(CRUDL,),
relationships=None, links=None, preprocessors=None,
postprocessors=None, fields=None, paginate_by=100,
auto_relationships=True, pks=None, create_fields=None,
update_fields=None, list_fields=None, append_slash=False):
"""
Creates a ResourceBase subclass by inspecting a SQLAlchemy
Model. This is somewhat more restrictive than explicitly
creating managers and resources. However, if you only need
    any of the basic CRUD+L operations, it can save a lot of boilerplate.
:param sqlalchemy.Model model: This is the model that
will be inspected to create a Resource and Manager from.
By default, all of it's fields will be exposed, although
this can be overridden using the fields attribute.
:param tuple resource_bases: A tuple of ResourceBase subclasses.
Defaults to the restmixins.CRUDL class only. However if you only
wanted Update and Delete you could pass in
```(restmixins.Update, restmixins.Delete)``` which
would cause the resource to inherit from those two.
Additionally, you could create your own mixins and pass them in
as the resource_bases
:param tuple relationships: extra relationships to pass
into the ResourceBase constructor. If auto_relationships
is set to True, then they will be appended to these relationships.
:param tuple links: Extra links to pass into the ResourceBase as
the class _links attribute. Defaults to an empty tuple.
:param tuple preprocessors: Preprocessors for the resource class attribute.
:param tuple postprocessors: Postprocessors for the resource class attribute.
:param ripozo_sqlalchemy.SessionHandler|ripozo_sqlalchemy.ScopedSessionHandler session_handler: A session handler
to use when instantiating an instance of the Manager class created
from the model. This is responsible for getting and handling
sessions in both normal cases and exceptions.
:param tuple fields: The fields to expose on the api. Defaults to
all of the fields on the model.
:param bool auto_relationships: If True, then the SQLAlchemy Model
will be inspected for relationships and they will be automatically
appended to the relationships on the resource class attribute.
:param list create_fields: A list of the fields that are valid when
creating a resource. By default this will be the fields without
any primary keys included
:param list update_fields: A list of the fields that are valid when
updating a resource. By default this will be the fields without
any primary keys included
:param list list_fields: A list of the fields that will be returned
when the list endpoint is requested. Defaults to the fields
attribute.
:param bool append_slash: A flag to forcibly append slashes to
the end of urls.
:return: A ResourceBase subclass and AlchemyManager subclass
:rtype: ResourceMetaClass
"""
relationships = relationships or tuple()
if auto_relationships:
relationships += _get_relationships(model)
links = links or tuple()
preprocessors = preprocessors or tuple()
postprocessors = postprocessors or tuple()
pks = pks or _get_pks(model)
fields = fields or _get_fields_for_model(model)
list_fields = list_fields or fields
create_fields = create_fields or [x for x in fields if x not in set(pks)]
update_fields = update_fields or [x for x in fields if x not in set(pks)]
manager_cls_attrs = dict(paginate_by=paginate_by, fields=fields, model=model,
list_fields=list_fields, create_fields=create_fields,
update_fields=update_fields)
manager_class = type(str(model.__name__), (AlchemyManager,), manager_cls_attrs)
manager = manager_class(session_handler)
resource_cls_attrs = dict(preprocessors=preprocessors,
postprocessors=postprocessors,
_relationships=relationships, _links=links,
pks=pks, manager=manager, append_slash=append_slash)
res_class = ResourceMetaClass(str(model.__name__), resource_bases, resource_cls_attrs)
return res_class | Creates a ResourceBase subclass by inspecting a SQLAlchemy
Model. This is somewhat more restrictive than explicitly
creating managers and resources. However, if you only need
any of the basic CRUD+L operations, it can save a lot of boilerplate.
:param sqlalchemy.Model model: This is the model that
will be inspected to create a Resource and Manager from.
By default, all of it's fields will be exposed, although
this can be overridden using the fields attribute.
:param tuple resource_bases: A tuple of ResourceBase subclasses.
Defaults to the restmixins.CRUDL class only. However if you only
wanted Update and Delete you could pass in
```(restmixins.Update, restmixins.Delete)``` which
would cause the resource to inherit from those two.
Additionally, you could create your own mixins and pass them in
as the resource_bases
:param tuple relationships: extra relationships to pass
into the ResourceBase constructor. If auto_relationships
is set to True, then they will be appended to these relationships.
:param tuple links: Extra links to pass into the ResourceBase as
the class _links attribute. Defaults to an empty tuple.
:param tuple preprocessors: Preprocessors for the resource class attribute.
:param tuple postprocessors: Postprocessors for the resource class attribute.
:param ripozo_sqlalchemy.SessionHandler|ripozo_sqlalchemy.ScopedSessionHandler session_handler: A session handler
to use when instantiating an instance of the Manager class created
from the model. This is responsible for getting and handling
sessions in both normal cases and exceptions.
:param tuple fields: The fields to expose on the api. Defaults to
all of the fields on the model.
:param bool auto_relationships: If True, then the SQLAlchemy Model
will be inspected for relationships and they will be automatically
appended to the relationships on the resource class attribute.
:param list create_fields: A list of the fields that are valid when
creating a resource. By default this will be the fields without
any primary keys included
:param list update_fields: A list of the fields that are valid when
updating a resource. By default this will be the fields without
any primary keys included
:param list list_fields: A list of the fields that will be returned
when the list endpoint is requested. Defaults to the fields
attribute.
:param bool append_slash: A flag to forcibly append slashes to
the end of urls.
:return: A ResourceBase subclass and AlchemyManager subclass
:rtype: ResourceMetaClass | entailment |
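A usage sketch under stated assumptions: MyModel is a hypothetical declarative model, engine an existing SQLAlchemy engine, and ScopedSessionHandler the session handler named in the docstring:

resource_class = create_resource(MyModel, ScopedSessionHandler(engine))
# Columns become fields, relationships become (List)Relationship instances,
# and CRUD+L endpoints are generated under the model's name.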
def _is_pickle_valid(self):
"""Logic to decide if the file should be processed or just needs to
be loaded from its pickle data.
"""
if not os.path.exists(self._pickle_file):
return False
else:
file_mtime = os.path.getmtime(self.logfile)
pickle_mtime = os.path.getmtime(self._pickle_file)
if file_mtime > pickle_mtime:
return False
return True | Logic to decide if the file should be processed or just needs to
be loaded from its pickle data. | entailment |
def _load(self):
"""Load data from a pickle file. """
with open(self._pickle_file, 'rb') as source:
pickler = pickle.Unpickler(source)
for attribute in self._pickle_attributes:
pickle_data = pickler.load()
setattr(self, attribute, pickle_data) | Load data from a pickle file. | entailment |
def _save(self):
"""Save the attributes defined on _pickle_attributes in a pickle file.
This improves a lot the nth run as the log file does not need to be
processed every time.
"""
with open(self._pickle_file, 'wb') as source:
pickler = pickle.Pickler(source, pickle.HIGHEST_PROTOCOL)
for attribute in self._pickle_attributes:
attr = getattr(self, attribute, None)
pickler.dump(attr) | Save the attributes defined on _pickle_attributes in a pickle file.
This improves a lot the nth run as the log file does not need to be
processed every time. | entailment |
def parse_data(self, logfile):
"""Parse data from data stream and replace object lines.
:param logfile: [required] Log file data stream.
:type logfile: str
"""
for line in logfile:
stripped_line = line.strip()
parsed_line = Line(stripped_line)
if parsed_line.valid:
self._valid_lines.append(parsed_line)
else:
self._invalid_lines.append(stripped_line)
self.total_lines = len(self._valid_lines) + len(self._invalid_lines) | Parse data from data stream and replace object lines.
:param logfile: [required] Log file data stream.
:type logfile: str | entailment |
def filter(self, filter_func, reverse=False):
"""Filter current log lines by a given filter function.
This allows to drill down data out of the log file by filtering the
relevant log lines to analyze.
For example, filter by a given IP so only log lines for that IP are
further processed with commands (top paths, http status counter...).
:param filter_func: [required] Filter method, see filters.py for all
available filters.
:type filter_func: function
:param reverse: negate the filter (so accept all log lines that return
``False``).
:type reverse: boolean
:returns: a new instance of Log containing only log lines
that passed the filter function.
:rtype: :class:`Log`
"""
new_log_file = Log()
new_log_file.logfile = self.logfile
new_log_file.total_lines = 0
new_log_file._valid_lines = []
new_log_file._invalid_lines = self._invalid_lines[:]
# add the reverse conditional outside the loop to keep the loop as
# straightforward as possible
if not reverse:
for i in self._valid_lines:
if filter_func(i):
new_log_file.total_lines += 1
new_log_file._valid_lines.append(i)
else:
for i in self._valid_lines:
if not filter_func(i):
new_log_file.total_lines += 1
new_log_file._valid_lines.append(i)
return new_log_file | Filter current log lines by a given filter function.
This allows to drill down data out of the log file by filtering the
relevant log lines to analyze.
For example, filter by a given IP so only log lines for that IP are
further processed with commands (top paths, http status counter...).
:param filter_func: [required] Filter method, see filters.py for all
available filters.
:type filter_func: function
:param reverse: negate the filter (so accept all log lines that return
``False``).
:type reverse: boolean
:returns: a new instance of Log containing only log lines
that passed the filter function.
:rtype: :class:`Log` | entailment |
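Because filter() returns a new Log, filters compose by chaining (a sketch; `log` and the thresholds are illustrative):

slow_office = log.filter(filter_ip_range('10.0.0')).filter(filter_slow_requests('1000'))
print(slow_office.cmd_average_response_time())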
def commands(cls):
"""Returns a list of all methods that start with ``cmd_``."""
cmds = [cmd[4:] for cmd in dir(cls) if cmd.startswith('cmd_')]
return cmds | Returns a list of all methods that start with ``cmd_``. | entailment |
def cmd_http_methods(self):
"""Reports a breakdown of how many requests have been made per HTTP
method (GET, POST...).
"""
methods = defaultdict(int)
for line in self._valid_lines:
methods[line.http_request_method] += 1
return methods | Reports a breakdown of how many requests have been made per HTTP
method (GET, POST...). | entailment |
def cmd_ip_counter(self):
"""Reports a breakdown of how many requests have been made per IP.
.. note::
To enable this command requests need to provide a header with the
        forwarded IP (usually X-Forwarded-For) and it must be the only
        header being captured.
"""
ip_counter = defaultdict(int)
for line in self._valid_lines:
ip = line.get_ip()
if ip is not None:
ip_counter[ip] += 1
return ip_counter | Reports a breakdown of how many requests have been made per IP.
.. note::
To enable this command requests need to provide a header with the
forwarded IP (usually X-Forwarded-For) and it must be the only
header being captured. | entailment |
def cmd_status_codes_counter(self):
"""Generate statistics about HTTP status codes. 404, 500 and so on.
"""
status_codes = defaultdict(int)
for line in self._valid_lines:
status_codes[line.status_code] += 1
return status_codes | Generate statistics about HTTP status codes. 404, 500 and so on. | entailment |
def cmd_request_path_counter(self):
"""Generate statistics about HTTP requests' path."""
paths = defaultdict(int)
for line in self._valid_lines:
paths[line.http_request_path] += 1
return paths | Generate statistics about HTTP requests' path. | entailment |
def cmd_slow_requests(self):
"""List all requests that took a certain amount of time to be
processed.
.. warning::
By now hardcoded to 1 second (1000 milliseconds), improve the
command line interface to allow to send parameters to each command
or globally.
"""
slow_requests = [
line.time_wait_response
for line in self._valid_lines
if line.time_wait_response > 1000
]
return slow_requests | List the response time of every request that took longer than a
given threshold to be processed.
.. warning::
For now the threshold is hardcoded to 1 second (1000 milliseconds);
the command line interface should be improved to allow sending
parameters to each command or globally.
def cmd_average_response_time(self):
"""Returns the average response time of all, non aborted, requests."""
average = [
line.time_wait_response
for line in self._valid_lines
if line.time_wait_response >= 0
]
divisor = float(len(average))
if divisor > 0:
return sum(average) / divisor
return 0 | Returns the average response time of all non-aborted requests.
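A stand-alone sketch of the averaging above; it assumes, as the ``>= 0`` check suggests, that aborted requests carry a negative timing value (the sample numbers are made up):

times = [120, -1, 80, 100, -1]  # -1 marks an aborted request
valid = [t for t in times if t >= 0]
average = sum(valid) / float(len(valid)) if valid else 0
assert average == 100.0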
def cmd_average_waiting_time(self):
"""Returns the average queue time of all, non aborted, requests."""
average = [
line.time_wait_queues
for line in self._valid_lines
if line.time_wait_queues >= 0
]
divisor = float(len(average))
if divisor > 0:
return sum(average) / divisor
return 0 | Returns the average queue time of all non-aborted requests.
def cmd_server_load(self):
"""Generate statistics regarding how many requests were processed by
each downstream server.
"""
servers = defaultdict(int)
for line in self._valid_lines:
servers[line.server_name] += 1
return servers | Generate statistics regarding how many requests were processed by
each downstream server. | entailment |
def cmd_queue_peaks(self):
"""Generate a list of the requests peaks on the queue.
A queue peak is defined by the biggest value on the backend queue
on a series of log lines that are between log lines without being
queued.
.. warning::
Allow to configure up to which peak can be ignored. Currently
set to 1.
"""
threshold = 1
peaks = []
current_peak = 0
current_queue = 0
current_span = 0
first_on_queue = None
for line in self._valid_lines:
current_queue = line.queue_backend
if current_queue > 0:
current_span += 1
if first_on_queue is None:
first_on_queue = line.accept_date
if current_queue == 0 and current_peak > threshold:
data = {
'peak': current_peak,
'span': current_span,
'first': first_on_queue,
'last': line.accept_date,
}
peaks.append(data)
current_peak = 0
current_span = 0
first_on_queue = None
if current_queue > current_peak:
current_peak = current_queue
# case of a series that does not end
if current_queue > 0 and current_peak > threshold:
data = {
'peak': current_peak,
'span': current_span,
'first': first_on_queue,
'last': line.accept_date,
}
peaks.append(data)
return peaks | Generate a list of the request peaks on the queue.
A queue peak is defined as the biggest value seen on the backend
queue during a run of consecutive queued log lines, delimited by
log lines that were not queued.
.. warning::
The minimum peak size that gets reported should be configurable.
Currently it is hardcoded to 1.
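A self-contained sketch of the peak detection above on synthetic backend-queue samples, using the same threshold of 1 (the date bookkeeping is trimmed down):

def queue_peaks(samples, threshold=1):
    peaks, peak, span = [], 0, 0
    for value in samples:
        if value > 0:
            span += 1
        if value == 0 and peak > threshold:
            peaks.append({'peak': peak, 'span': span})
            peak, span = 0, 0
        peak = max(peak, value)
    if samples and samples[-1] > 0 and peak > threshold:  # unfinished series
        peaks.append({'peak': peak, 'span': span})
    return peaks

assert queue_peaks([0, 2, 5, 3, 0, 1, 0]) == [{'peak': 5, 'span': 3}]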
def cmd_connection_type(self):
"""Generates statistics on how many requests are made via HTTP and how
many are made via SSL.
.. note::
This only works if the request path contains the default port for
SSL (443).
.. warning::
The ports are hardcoded; they should be configurable.
"""
https = 0
non_https = 0
for line in self._valid_lines:
if line.is_https():
https += 1
else:
non_https += 1
return https, non_https | Generates statistics on how many requests are made via HTTP and how
many are made via SSL.
.. note::
This only works if the request path contains the default port for
SSL (443).
.. warning::
The ports are hardcoded; they should be configurable.
def cmd_requests_per_minute(self):
"""Generates statistics on how many requests were made per minute.
.. note::
Try to combine it with time constraints (``-s`` and ``-d``), as the
output of this command can be huge otherwise.
"""
if len(self._valid_lines) == 0:
return
current_minute = self._valid_lines[0].accept_date
current_minute_counter = 0
requests = []
one_minute = timedelta(minutes=1)
def format_and_append(append_to, date, counter):
seconds_and_micro = timedelta(
seconds=date.second,
microseconds=date.microsecond,
)
minute_formatted = date - seconds_and_micro
append_to.append((minute_formatted, counter))
# note that _valid_lines is kept sorted by date
for line in self._valid_lines:
line_date = line.accept_date
if line_date - current_minute < one_minute and \
line_date.minute == current_minute.minute:
current_minute_counter += 1
else:
format_and_append(
requests,
current_minute,
current_minute_counter,
)
current_minute_counter = 1
current_minute = line_date
if current_minute_counter > 0:
format_and_append(
requests,
current_minute,
current_minute_counter,
)
return requests | Generates statistics on how many requests were made per minute.
.. note::
Try to combine it with time constraints (``-s`` and ``-d``), as the
output of this command can be huge otherwise.
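The minute truncation performed by ``format_and_append`` can be verified in isolation:

from datetime import datetime, timedelta

stamp = datetime(2024, 1, 1, 10, 42, 37, 123456)
truncated = stamp - timedelta(seconds=stamp.second,
                              microseconds=stamp.microsecond)
assert truncated == datetime(2024, 1, 1, 10, 42)  # snapped to the minute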
def cmd_print(self):
"""Returns the raw lines to be printed."""
if not self._valid_lines:
return ''
return '\n'.join([line.raw_line for line in self._valid_lines]) + '\n' | Returns the raw lines to be printed. | entailment |
def _sort_lines(self):
"""Haproxy writes its logs after having gathered all information
related to each specific connection. A simple request can be
really quick but others can be really slow, thus even if one connection
is logged later, it could have been accepted before others that are
already processed and logged.
This method sorts all valid log lines by their acceptance date,
providing the real order in which connections where made to the server.
"""
self._valid_lines = sorted(
self._valid_lines,
key=lambda line: line.accept_date,
) | HAProxy writes its log line for each connection only after having
gathered all information related to it. A simple request can be
really quick while others can be really slow, so even if one
connection is logged later, it could have been accepted before others
that are already processed and logged.
This method sorts all valid log lines by their acceptance date,
providing the real order in which connections were made to the server.
def _sort_and_trim(data, reverse=False):
"""Sorts a dictionary with at least two fields on each of them sorting
by the second element.
.. warning::
Right now is hardcoded to 10 elements, improve the command line
interface to allow to send parameters to each command or globally.
"""
threshold = 10
data_list = data.items()
data_list = sorted(
data_list,
key=lambda data_info: data_info[1],
reverse=reverse,
)
return data_list[:threshold] | Sorts a dictionary's items by their value (the second element of
each item) and trims the result.
.. warning::
Right now the result is hardcoded to 10 elements; the command line
interface should be improved to allow sending parameters to each
command or globally.
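A stand-alone sketch of the sort-and-trim behaviour with made-up data and a smaller threshold:

data = {'/home': 42, '/login': 17, '/api': 99}
top_two = sorted(data.items(), key=lambda item: item[1], reverse=True)[:2]
assert top_two == [('/api', 99), ('/home', 42)]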
def db_access_point(func):
"""
Wraps a function that actually accesses the database.
It injects a session into the method and hands the session to the
session handler after the function has run.
:param method func: The method that is interacting with the database.
"""
@wraps(func)
def wrapper(self, *args, **kwargs):
"""
Wrapper responsible for handling
sessions
"""
session = self.session_handler.get_session()
try:
resp = func(self, session, *args, **kwargs)
except Exception as exc:
self.session_handler.handle_session(session, exc=exc)
raise  # re-raise, preserving the original traceback
else:
self.session_handler.handle_session(session)
return resp
return wrapper | Wraps a function that actually accesses the database.
It injects a session into the method and hands the session to the
session handler after the function has run.
:param method func: The method that is interacting with the database. | entailment |
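A minimal sketch of how the decorator is meant to be used; ``DummySessionHandler`` and ``FakeManager`` are hypothetical stand-ins for the real session handler and manager classes:

from functools import wraps

class DummySessionHandler(object):
    def get_session(self):
        return 'session'
    def handle_session(self, session, exc=None):
        pass  # a real handler would commit, roll back or close here

def db_access_point(func):
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        session = self.session_handler.get_session()
        try:
            resp = func(self, session, *args, **kwargs)
        except Exception as exc:
            self.session_handler.handle_session(session, exc=exc)
            raise
        self.session_handler.handle_session(session)
        return resp
    return wrapper

class FakeManager(object):
    session_handler = DummySessionHandler()

    @db_access_point
    def ping(self, session):
        return session  # the session was injected by the decorator

assert FakeManager().ping() == 'session'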
def _get_field_python_type(model, name):
"""
Gets the python type for the attribute on the model
with the name provided.
:param Model model: The SqlAlchemy model class.
:param unicode name: The column name on the model
that you are attempting to get the python type.
:return: The python type of the column
:rtype: type
"""
try:
return getattr(model, name).property.columns[0].type.python_type
except AttributeError: # It's a relationship
parts = name.split('.')
model = getattr(model, parts.pop(0)).comparator.mapper.class_
return AlchemyManager._get_field_python_type(model, '.'.join(parts))
except NotImplementedError:
# This is for pickle type columns.
return object | Gets the python type for the attribute on the model
with the name provided.
:param Model model: The SqlAlchemy model class.
:param unicode name: The column name on the model
that you are attempting to get the python type.
:return: The python type of the column
:rtype: type | entailment |
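The underlying SQLAlchemy introspection can be sketched with a made-up model, assuming SQLAlchemy 1.x is installed:

from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class Person(Base):
    __tablename__ = 'person'
    id = Column(Integer, primary_key=True)
    name = Column(String)

# the same attribute path the manager walks internally
assert Person.id.property.columns[0].type.python_type is int
assert Person.name.property.columns[0].type.python_type is str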
def get_field_type(cls, name):
"""
Takes a field name and gets an appropriate BaseField instance
for that column. It inspects the Model that is set on the manager
to determine what the BaseField subclass should be.
:param unicode name:
:return: A BaseField subclass that is appropriate for
translating a string input into the appropriate format.
:rtype: ripozo.viewsets.fields.base.BaseField
"""
python_type = cls._get_field_python_type(cls.model, name)
if python_type in _COLUMN_FIELD_MAP:
field_class = _COLUMN_FIELD_MAP[python_type]
return field_class(name)
return BaseField(name) | Takes a field name and gets an appropriate BaseField instance
for that column. It inspects the Model that is set on the manager
to determine what the BaseField subclass should be.
:param unicode name:
:return: A BaseField subclass that is appropriate for
translating a string input into the appropriate format.
:rtype: ripozo.viewsets.fields.base.BaseField | entailment |
def create(self, session, values, *args, **kwargs):
"""
Creates a new instance of the self.model
and persists it to the database.
:param dict values: The dictionary of values to
set on the model. The key is the column name
and the value is what it will be set to. If
cls._create_fields is defined, those fields are used;
otherwise, the fields defined in cls.fields are used.
:param Session session: The sqlalchemy session
:return: The serialized model. It will use the self.fields
attribute for this.
:rtype: dict
"""
model = self.model()
model = self._set_values_on_model(model, values, fields=self.create_fields)
session.add(model)
session.commit()
return self.serialize_model(model) | Creates a new instance of the self.model
and persists it to the database.
:param dict values: The dictionary of values to
set on the model. The key is the column name
and the value is what it will be set to. If
cls._create_fields is defined, those fields are used;
otherwise, the fields defined in cls.fields are used.
:param Session session: The sqlalchemy session
:return: The serialized model. It will use the self.fields
attribute for this.
:rtype: dict | entailment |
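The add/commit pattern used by ``create`` can be exercised against an in-memory SQLite database; this hedged sketch bypasses the manager and uses a made-up ``Person`` model (assuming SQLAlchemy 1.x is installed):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Person(Base):
    __tablename__ = 'person'
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine('sqlite://')  # in-memory database
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

person = Person()
setattr(person, 'name', 'alice')  # the manager loops setattr over values
session.add(person)
session.commit()
assert person.id is not None  # the primary key was assigned on commit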
def retrieve(self, session, lookup_keys, *args, **kwargs):
"""
Retrieves a model using the lookup keys provided.
Only one model should be returned by the lookup_keys
or else the manager will fail.
:param Session session: The SQLAlchemy session to use
:param dict lookup_keys: A dictionary mapping the fields
and their expected values
:return: The dictionary of keys and values for the retrieved
model. The only values returned will be those specified by
the fields attribute on the class
:rtype: dict
:raises: NotFoundException
"""
model = self._get_model(lookup_keys, session)
return self.serialize_model(model) | Retrieves a model using the lookup keys provided.
Only one model should be returned by the lookup_keys
or else the manager will fail.
:param Session session: The SQLAlchemy session to use
:param dict lookup_keys: A dictionary mapping the fields
and their expected values
:return: The dictionary of keys and values for the retrieved
model. The only values returned will be those specified by
the fields attribute on the class
:rtype: dict
:raises: NotFoundException | entailment |
def retrieve_list(self, session, filters, *args, **kwargs):
"""
Retrieves a list of the model for this manager.
It is restricted by the filters provided.
:param Session session: The SQLAlchemy session to use
:param dict filters: The filters to restrict the returned
models on
:return: A tuple of the list of dictionary representation
of the models and the dictionary of meta data
:rtype: list, dict
"""
query = self.queryset(session)
translator = IntegerField('tmp')
pagination_count = translator.translate(
filters.pop(self.pagination_count_query_arg, self.paginate_by)
)
pagination_pk = translator.translate(
filters.pop(self.pagination_pk_query_arg, 1)
)
pagination_pk -= 1  # the offset math is zero-based; the exposed page numbers are one-based
query = query.filter_by(**filters)
if pagination_pk:
query = query.offset(pagination_pk * pagination_count)
if pagination_count:
query = query.limit(pagination_count + 1)
count = query.count()
next_link = None
previous_link = None
if count > pagination_count:
next_link = {self.pagination_pk_query_arg: pagination_pk + 2,
self.pagination_count_query_arg: pagination_count}
if pagination_pk > 0:
previous_link = {self.pagination_pk_query_arg: pagination_pk,
self.pagination_count_query_arg: pagination_count}
field_dict = self.dot_field_list_to_dict(self.list_fields)
props = self.serialize_model(query[:pagination_count], field_dict=field_dict)
meta = dict(links=dict(next=next_link, previous=previous_link))
return props, meta | Retrieves a list of the model for this manager.
It is restricted by the filters provided.
:param Session session: The SQLAlchemy session to use
:param dict filters: The filters to restrict the returned
models on
:return: A tuple of the list of dictionary representation
of the models and the dictionary of meta data
:rtype: list, dict | entailment |
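The pagination arithmetic above is easy to get wrong, so here it is spelled out stand-alone (the row count is made up):

pagination_count = 10
requested_page = 2             # one-based, as sent by the client
page = requested_page - 1      # zero-based for the offset math
offset = page * pagination_count
limit = pagination_count + 1   # one extra row signals a next page
rows_returned = 11             # pretend the query returned this many
has_next = rows_returned > pagination_count
assert (offset, limit, has_next) == (10, 11, True)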
def update(self, session, lookup_keys, updates, *args, **kwargs):
"""
Updates the model with the specified lookup_keys and returns
the dictified object.
:param Session session: The SQLAlchemy session to use
:param dict lookup_keys: A dictionary mapping the fields
and their expected values
:param dict updates: The columns and the values to update
them to.
:return: The dictionary of keys and values for the retrieved
model. The only values returned will be those specified by
the fields attribute on the class
:rtype: dict
:raises: NotFoundException
"""
model = self._get_model(lookup_keys, session)
model = self._set_values_on_model(model, updates, fields=self.update_fields)
session.commit()
return self.serialize_model(model) | Updates the model with the specified lookup_keys and returns
the dictified object.
:param Session session: The SQLAlchemy session to use
:param dict lookup_keys: A dictionary mapping the fields
and their expected values
:param dict updates: The columns and the values to update
them to.
:return: The dictionary of keys and values for the retrieved
model. The only values returned will be those specified by
the fields attribute on the class
:rtype: dict
:raises: NotFoundException | entailment |
def delete(self, session, lookup_keys, *args, **kwargs):
"""
Deletes the model found using the lookup_keys
:param Session session: The SQLAlchemy session to use
:param dict lookup_keys: A dictionary mapping the fields
and their expected values
:return: An empty dictionary
:rtype: dict
:raises: NotFoundException
"""
model = self._get_model(lookup_keys, session)
session.delete(model)
session.commit()
return {} | Deletes the model found using the lookup_keys
:param Session session: The SQLAlchemy session to use
:param dict lookup_keys: A dictionary mapping the fields
and their expected values
:return: An empty dictionary
:rtype: dict
:raises: NotFoundException | entailment |
def serialize_model(self, model, field_dict=None):
"""
Takes a model and serializes the fields provided into
a dictionary.
:param Model model: The Sqlalchemy model instance to serialize
:param dict field_dict: The dictionary of fields to return.
:return: The serialized model.
:rtype: dict
"""
response = self._serialize_model_helper(model, field_dict=field_dict)
return make_json_safe(response) | Takes a model and serializes the fields provided into
a dictionary.
:param Model model: The Sqlalchemy model instance to serialize
:param dict field_dict: The dictionary of fields to return.
:return: The serialized model.
:rtype: dict | entailment |
def _serialize_model_helper(self, model, field_dict=None):
"""
A recursive function for serializing a model
into a json ready format.
"""
field_dict = field_dict or self.dot_field_list_to_dict()
if model is None:
return None
if isinstance(model, Query):
model = model.all()
if isinstance(model, (list, set)):
return [self.serialize_model(m, field_dict=field_dict) for m in model]
model_dict = {}
for name, sub in six.iteritems(field_dict):
value = getattr(model, name)
if sub:
value = self.serialize_model(value, field_dict=sub)
model_dict[name] = value
return model_dict | A recursive function for serializing a model
into a JSON-ready format.
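The recursive walk can be sketched on plain objects; ``Obj`` is a stand-in for a SQLAlchemy model and the field dict mirrors what ``dot_field_list_to_dict`` would presumably produce for ['id', 'author.name']:

class Obj(object):
    def __init__(self, **kw):
        self.__dict__.update(kw)

def serialize(model, field_dict):
    result = {}
    for name, sub in field_dict.items():
        value = getattr(model, name)
        result[name] = serialize(value, sub) if sub else value
    return result

post = Obj(id=1, author=Obj(name='alice'))
assert serialize(post, {'id': None, 'author': {'name': None}}) == \
    {'id': 1, 'author': {'name': 'alice'}}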
def _get_model(self, lookup_keys, session):
"""
Gets the sqlalchemy Model instance associated with
the lookup keys.
:param dict lookup_keys: A dictionary of the keys
and their associated values.
:param Session session: The sqlalchemy session
:return: The sqlalchemy orm model instance.
"""
try:
return self.queryset(session).filter_by(**lookup_keys).one()
except NoResultFound:
raise NotFoundException('No model of type {0} was found using '
'lookup_keys {1}'.format(self.model.__name__, lookup_keys)) | Gets the sqlalchemy Model instance associated with
the lookup keys.
:param dict lookup_keys: A dictionary of the keys
and their associated values.
:param Session session: The sqlalchemy session
:return: The sqlalchemy orm model instance. | entailment |
def _set_values_on_model(self, model, values, fields=None):
"""
Updates the model with the specified values.
:param Model model: The sqlalchemy model instance
:param dict values: The dictionary of attributes and
the values to set.
:param list fields: A list of strings indicating
the valid fields. Defaults to self.fields.
:return: The model with the updated values
:rtype: Model
"""
fields = fields or self.fields
for name, val in six.iteritems(values):
if name not in fields:
continue
setattr(model, name, val)
return model | Updates the model with the specified values.
:param Model model: The sqlalchemy model instance
:param dict values: The dictionary of attributes and
the values to set.
:param list fields: A list of strings indicating
the valid fields. Defaults to self.fields.
:return: The model with the updated values
:rtype: Model | entailment |
def print_commands():
"""Prints all commands available from Log with their
description.
"""
dummy_log_file = Log()
commands = Log.commands()
commands.sort()
for cmd in commands:
cmd = getattr(dummy_log_file, 'cmd_{0}'.format(cmd))
description = cmd.__doc__
if description:
description = re.sub(r'\n\s+', ' ', description)
description = description.strip()
print('{0}: {1}\n'.format(cmd.__name__, description)) | Prints all commands available from Log with their
description. | entailment |
def print_filters():
"""Prints all filters available with their description."""
for filter_name in VALID_FILTERS:
filter_func = getattr(filters, 'filter_{0}'.format(filter_name))
description = filter_func.__doc__
if description:
description = re.sub(r'\n\s+', ' ', description)
description = description.strip()
print('{0}: {1}\n'.format(filter_name, description)) | Prints all filters available with their description. | entailment |
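Both printers rely on the same docstring flattening, shown here stand-alone:

import re

description = """Reports a breakdown of requests.
    Grouped per HTTP method."""
flattened = re.sub(r'\n\s+', ' ', description).strip()
assert flattened == 'Reports a breakdown of requests. Grouped per HTTP method.'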