sentence1
stringlengths 52
3.87M
| sentence2
stringlengths 1
47.2k
| label
stringclasses 1
value |
---|---|---|
def send_buffer(self):
    """Utility function that sends the buffer into the provided socket.
    The buffer itself will slowly clear out and is modified in place.

    Raises ``TimeoutError`` or ``ConnectionError`` (and disconnects the
    connection) if writing to the socket fails.
    """
    buf = self._send_buf
    sock = self.connection._sock
    try:
        timeout = sock.gettimeout()
        # Temporarily switch to non-blocking mode so a full kernel
        # buffer does not stall us; the timeout is restored below.
        sock.setblocking(False)
        try:
            for idx, item in enumerate(buf):
                sent = 0
                while 1:
                    try:
                        sent = sock.send(item)
                    except IOError as e:
                        if e.errno == errno.EAGAIN:
                            continue
                        elif e.errno == errno.EWOULDBLOCK:
                            break
                        raise
                    self.sent_something = True
                    break
                if sent < len(item):
                    # Keep the unsent tail of this item plus everything
                    # after it; drop what already went out.
                    buf[:idx + 1] = [item[sent:]]
                    break
            else:
                # Everything was written; clear the buffer in place.
                del buf[:]
        finally:
            sock.settimeout(timeout)
    except IOError as e:
        self.connection.disconnect()
        if isinstance(e, socket.timeout):
            raise TimeoutError('Timeout writing to socket (host %s)'
                               % self.host_id)
        raise ConnectionError('Error while writing to socket (host %s): %s'
                              % (self.host_id, e))
def send_pending_requests(self):
    """Sends all pending requests into the connection.  The default is
    to only send pending data that fits into the socket without
    blocking.  This returns `True` if all data was sent or `False` if
    pending data is left over.
    """
    assert_open(self)
    unsent_commands = self.commands
    if unsent_commands:
        self.commands = []
        if self.auto_batch:
            unsent_commands = auto_batch_commands(unsent_commands)
        buf = []
        for command_name, args, options, promise in unsent_commands:
            buf.append((command_name,) + tuple(args))
            # Remember what we expect back, in send order, so responses
            # can later be matched to their promises.
            self.pending_responses.append((command_name, options, promise))
        cmds = self.connection.pack_commands(buf)
        self._send_buf.extend(cmds)
    if not self._send_buf:
        return True
    self.send_buffer()
    return not self._send_buf
def wait_for_responses(self, client):
    """Waits for all responses to come back and resolves the
    eventual results.

    :param client: the client used to parse responses off the
        connection.
    :raises RuntimeError: if there are still unsent requests; those
        must be flushed with ``send_pending_requests`` first.
    """
    assert_open(self)
    if self.has_pending_requests:
        raise RuntimeError('Cannot wait for responses if there are '
                           'pending requests outstanding. You need '
                           'to wait for pending requests to be sent '
                           'first.')
    pending = self.pending_responses
    self.pending_responses = []
    for command_name, options, promise in pending:
        value = client.parse_response(
            self.connection, command_name, **options)
        promise.resolve(value)
def _get_command_buffer(self, host_id, command_name):
    """Returns the command buffer for the given command and arguments.

    A cached buffer is reused when one is already registered for the
    host; otherwise a new one is created lazily.  When a concurrency
    limit is configured we block (via ``join``) until a slot frees up.
    """
    buf = self._cb_poll.get(host_id)
    if buf is not None:
        return buf
    if self._max_concurrency is not None:
        # Throttle: drain outstanding responses until we are below the
        # concurrency ceiling.
        while len(self._cb_poll) >= self._max_concurrency:
            self.join(timeout=1.0)
    def connect():
        return self.connection_pool.get_connection(
            command_name, shard_hint=host_id)
    buf = CommandBuffer(host_id, connect, self.auto_batch)
    self._cb_poll.register(host_id, buf)
    return buf
def _release_command_buffer(self, command_buffer):
"""This is called by the command buffer when it closes."""
if command_buffer.closed:
return
self._cb_poll.unregister(command_buffer.host_id)
self.connection_pool.release(command_buffer.connection)
command_buffer.connection = None | This is called by the command buffer when it closes. | entailment |
def join(self, timeout=None):
    """Waits for all outstanding responses to come back or the timeout
    to be hit.

    :param timeout: maximum number of seconds to wait, or `None` to
        wait indefinitely.
    :raises TimeoutError: if data is still outstanding when the
        timeout expires.
    """
    remaining = timeout
    while self._cb_poll and (remaining is None or remaining > 0):
        now = time.time()
        rv = self._cb_poll.poll(remaining)
        if remaining is not None:
            # Deduct the time actually spent inside poll().
            remaining -= (time.time() - now)
        for command_buffer, event in rv:
            # This command buffer still has pending requests which
            # means we have to send them out first before we can read
            # all the data from it.
            if command_buffer.has_pending_requests:
                if event == 'close':
                    self._try_reconnect(command_buffer)
                elif event == 'write':
                    self._send_or_reconnect(command_buffer)
            # The general assumption is that all response is available
            # or this might block.  On reading we do not use async
            # receiving.  This generally works because latency in the
            # network is low and redis is super quick in sending.  It
            # does not make a lot of sense to complicate things here.
            elif event in ('read', 'close'):
                try:
                    command_buffer.wait_for_responses(self)
                finally:
                    self._release_command_buffer(command_buffer)
    if self._cb_poll and timeout is not None:
        raise TimeoutError('Did not receive all data in time.')
def target(self, hosts):
    """Temporarily retarget the client for one call.  This is useful
    when having to deal with a subset of hosts for one call.

    Returns a new client sharing this client's poll object; it can
    only be retargeted once.
    """
    if self.__is_retargeted:
        raise TypeError('Cannot use target more than once.')
    rv = FanoutClient(hosts, connection_pool=self.connection_pool,
                      max_concurrency=self._max_concurrency)
    # Share the poll object so responses for commands already issued
    # through this client are still collected by the same join loop.
    rv._cb_poll = self._cb_poll
    rv.__is_retargeted = True
    return rv
def target_key(self, key):
    """Temporarily retarget the client for one call to route
    specifically to the one host that the given key routes to.  In
    that case the result on the promise is just the one host's value
    instead of a dictionary.

    .. versionadded:: 1.3
    """
    router = self.connection_pool.cluster.get_router()
    host_id = router.get_host_for_key(key)
    rv = self.target([host_id])
    # Flag the retargeted client so the promise unwraps the single
    # host's value instead of returning a one-entry dictionary.
    rv.__resolve_singular_result = True
    return rv
def get_mapping_client(self, max_concurrency=64, auto_batch=None):
    """Returns a thread unsafe mapping client.  This client works
    similar to a redis pipeline and returns eventual result objects.
    It needs to be joined on to work properly.  Instead of using this
    directly you should use the :meth:`map` context manager which
    automatically joins.

    Returns an instance of :class:`MappingClient`.
    """
    if auto_batch is None:
        # Fall back to the cluster-wide auto batching setting.
        auto_batch = self.auto_batch
    return MappingClient(connection_pool=self.connection_pool,
                         max_concurrency=max_concurrency,
                         auto_batch=auto_batch)
def get_fanout_client(self, hosts, max_concurrency=64,
                      auto_batch=None):
    """Returns a thread unsafe fanout client.

    Returns an instance of :class:`FanoutClient`.
    """
    if auto_batch is None:
        # Fall back to the cluster-wide auto batching setting.
        auto_batch = self.auto_batch
    return FanoutClient(hosts, connection_pool=self.connection_pool,
                        max_concurrency=max_concurrency,
                        auto_batch=auto_batch)
def map(self, timeout=None, max_concurrency=64, auto_batch=None):
    """Returns a context manager for a map operation.  This runs
    multiple queries in parallel and then joins in the end to collect
    all results.

    In the context manager the client available is a
    :class:`MappingClient`.  Example usage::

        results = {}
        with cluster.map() as client:
            for key in keys_to_fetch:
                results[key] = client.get(key)
        for key, promise in results.iteritems():
            print '%s => %s' % (key, promise.value)
    """
    return MapManager(self.get_mapping_client(max_concurrency, auto_batch),
                      timeout=timeout)
def fanout(self, hosts=None, timeout=None, max_concurrency=64,
           auto_batch=None):
    r"""Returns a context manager for a map operation that fans out to
    manually specified hosts instead of using the routing system.  This
    can for instance be used to empty the database on all hosts.  The
    context manager returns a :class:`FanoutClient`.  Example usage::

        with cluster.fanout(hosts=[0, 1, 2, 3]) as client:
            results = client.info()
        for host_id, info in results.value.iteritems():
            print '%s -> %s' % (host_id, info['is'])

    The promise returned accumulates all results in a dictionary keyed
    by the `host_id`.

    The `hosts` parameter is a list of `host_id`\s or alternatively the
    string ``'all'`` to send the commands to all hosts.

    The fanout API needs to be used with a lot of care as it can cause
    a lot of damage when keys are written to hosts that do not expect
    them.
    """
    return MapManager(self.get_fanout_client(hosts, max_concurrency,
                                             auto_batch),
                      timeout=timeout)
def resolved(value):
    """Creates a promise object resolved with a certain value."""
    promise = Promise()
    promise.value = value
    promise._state = 'resolved'
    return promise
def rejected(reason):
    """Creates a promise object rejected with a certain value."""
    promise = Promise()
    promise.reason = reason
    promise._state = 'rejected'
    return promise
def resolve(self, value):
    """Resolves the promise with the given value.

    Resolving with another promise chains onto it instead: this
    promise settles when the other one does.

    :raises TypeError: when resolving a promise with itself.
    :raises RuntimeError: when the promise is no longer pending.
    """
    if self is value:
        raise TypeError('Cannot resolve promise with itself.')
    if isinstance(value, Promise):
        # Chain: adopt the other promise's eventual outcome.
        value.done(self.resolve, self.reject)
        return
    if self._state != 'pending':
        raise RuntimeError('Promise is no longer pending.')
    self.value = value
    self._state = 'resolved'
    # Detach the callback list before invoking so re-entrant calls
    # cannot observe or mutate it.
    callbacks = self._callbacks
    self._callbacks = None
    for callback in callbacks:
        callback(value)
def reject(self, reason):
    """Rejects the promise with the given reason."""
    if self._state != 'pending':
        raise RuntimeError('Promise is no longer pending.')
    self.reason = reason
    self._state = 'rejected'
    # Detach the errback list before invoking anything.
    pending_errbacks, self._errbacks = self._errbacks, None
    for errback in pending_errbacks:
        errback(reason)
def done(self, on_success=None, on_failure=None):
    """Attaches some callbacks to the promise and returns the promise.

    Callbacks attached to an already settled promise fire immediately.
    """
    if on_success is not None:
        if self._state == 'pending':
            self._callbacks.append(on_success)
        elif self._state == 'resolved':
            on_success(self.value)
    if on_failure is None:
        return self
    if self._state == 'pending':
        self._errbacks.append(on_failure)
    elif self._state == 'rejected':
        on_failure(self.reason)
    return self
def then(self, success=None, failure=None):
    """A utility method to add success and/or failure callback to the
    promise which will also return another promise in the process.

    The returned promise resolves with the callback's return value, or
    rejects with the exception the callback raised.
    """
    result = Promise()

    def run_success(value):
        try:
            result.resolve(success(value))
        except Exception as exc:
            result.reject(exc)

    def run_failure(reason):
        try:
            result.resolve(failure(reason))
        except Exception as exc:
            result.reject(exc)

    self.done(run_success, run_failure)
    return result
def get_key(self, command, args):
    """Returns the key a command operates on.

    :raises UnroutableCommand: for unknown commands, commands with
        movable keys, commands that take no key, and commands that
        operate on more than one key.
    """
    spec = COMMANDS.get(command.upper())
    if spec is None:
        raise UnroutableCommand('The command "%r" is unknown to the '
                                'router and cannot be handled as a '
                                'result.' % command)
    if 'movablekeys' in spec['flags']:
        # Bug fix: the '%r' placeholder was never filled in before.
        raise UnroutableCommand('The keys for "%r" are movable and '
                                'as such cannot be routed to a single '
                                'host.' % command)
    keys = extract_keys(args, spec['key_spec'])
    if len(keys) == 1:
        return keys[0]
    elif not keys:
        # Bug fix: the '%r' placeholder was never filled in before.
        raise UnroutableCommand(
            'The command "%r" does not operate on a key which means '
            'that no suitable host could be determined. Consider '
            'using a fanout instead.' % command)
    raise UnroutableCommand(
        'The command "%r" operates on multiple keys (%d passed) which is '
        'something that is not supported.' % (command, len(keys)))
def get_host_for_command(self, command, args):
    """Returns the host this command should be executed against."""
    key = self.get_key(command, args)
    return self.get_host_for_key(key)
def _rebuild_circle(self):
    """Updates the hash ring.

    Places roughly ``40 * len(nodes)`` points on the circle in total,
    apportioned to each node by its relative weight, then keeps the
    sorted list of points for bisection lookups.
    """
    self._hashring = {}
    self._sorted_keys = []
    total_weight = 0
    for node in self._nodes:
        total_weight += self._weights.get(node, 1)
    for node in self._nodes:
        weight = self._weights.get(node, 1)
        # Number of virtual points this node gets on the ring.
        ks = math.floor((40 * len(self._nodes) * weight) / total_weight)
        for i in xrange(0, int(ks)):
            k = md5_bytes('%s-%s-salt' % (node, i))
            # Each 16-byte digest yields four 32-bit little-endian keys.
            for l in xrange(0, 4):
                key = ((k[3 + l * 4] << 24) | (k[2 + l * 4] << 16) |
                       (k[1 + l * 4] << 8) | k[l * 4])
                self._hashring[key] = node
                self._sorted_keys.append(key)
    self._sorted_keys.sort()
def _get_node_pos(self, key):
    """Return node position(integer) for a given key or None.

    Hashes the key to a 32-bit little-endian value and bisects into
    the sorted ring keys; wraps around to position 0 past the end.
    """
    if not self._hashring:
        return
    k = md5_bytes(key)
    key = (k[3] << 24) | (k[2] << 16) | (k[1] << 8) | k[0]
    nodes = self._sorted_keys
    pos = bisect(nodes, key)
    if pos == len(nodes):
        # Wrap around the circle.
        return 0
    return pos
def remove_node(self, node):
    """Removes node from circle and rebuild it.

    Unknown nodes are ignored; the circle is rebuilt either way.
    """
    try:
        self._nodes.remove(node)
        self._weights.pop(node)
    except (ValueError, KeyError):
        pass
    self._rebuild_circle()
def add_node(self, node, weight=1):
    """Adds node to circle and rebuild it.

    Re-adding an existing node just updates its weight.
    """
    self._weights[node] = weight
    self._nodes.add(node)
    self._rebuild_circle()
def get_node(self, key):
    """Return node for a given key. Else return None."""
    pos = self._get_node_pos(key)
    return None if pos is None else self._hashring[self._sorted_keys[pos]]
def check_error(result, func, cargs):
    """Error checking proper value returns.

    A zero result means success; any other value raises RTreeError
    with the failing function's name and the library error message.
    """
    if result == 0:
        return
    raise RTreeError(
        'Error in "%s": %s' % (func.__name__, get_errors(result)))
def ddtodms(self, dd):
    """Convert decimal degrees to a ``(degrees, minutes, seconds)`` tuple.

    *dd* is a number (not a string, despite the original docstring).
    The sign of a negative input is carried on the most significant
    nonzero component: degrees first, then minutes, then seconds.
    """
    negative = dd < 0
    dd = abs(dd)
    minutes, seconds = divmod(dd * 3600, 60)
    degrees, minutes = divmod(minutes, 60)
    if negative:
        if degrees > 0:
            degrees = -degrees
        elif minutes > 0:
            minutes = -minutes
        else:
            seconds = -seconds
    return (degrees, minutes, seconds)
def dmstodd(self, dms):
    """Convert a DMS (degrees/minutes/seconds) value to decimal degrees.

    Accepts a packed numeric string such as ``'0453000.5'`` or a
    hemisphere-annotated one such as ``'453000.5N'``.  A 'W' or 'S'
    suffix (or a negative numeric value) produces a negative result.

    :raises core.RTreeError: if the value cannot be parsed.
    """
    letters = 'WENS'
    is_annotated = False
    try:
        float(dms)
    except ValueError:
        for letter in letters:
            if letter in dms.upper():
                is_annotated = True
                break
        if not is_annotated:
            raise core.RTreeError(
                "unable to parse '%s' to decimal degrees" % dms)
    is_negative = False
    if is_annotated:
        dms_upper = dms.upper()
        if 'W' in dms_upper or 'S' in dms_upper:
            is_negative = True
    else:
        if dms < 0:
            is_negative = True
    if is_annotated:
        # Strip the hemisphere letters; going through bytes lets the
        # same translate(None, delete) call work on Python 2 and 3.
        bletters = letters.encode(encoding='utf-8')
        bdms = dms.encode(encoding='utf-8')
        dms = bdms.translate(None, bletters).decode('ascii')
    # Layout of the packed value: the two digits before the decimal
    # point are seconds, the two before those minutes, the rest degrees.
    pieces = dms.split(".")
    divisor = 3600.0
    if len(pieces) == 1:
        S = dms[-2:]
        M = dms[-4:-2]
        D = dms[:-4]
    else:
        S = '{0:s}.{1:s}'.format(pieces[0][-2:], pieces[1])
        M = pieces[0][-4:-2]
        D = pieces[0][:-4]
    DD = float(D) + float(M) / 60.0 + float(S) / divisor
    if is_negative:
        DD = DD * -1.0
    return DD
def get_fobj(fname, mode='w+'):
    """Obtain a proper file object.

    Parameters
    ----------
    fname : string, file object, file descriptor
        If a string or file descriptor, then we create a file object. If
        *fname* is a file object, then we do nothing and ignore the
        specified *mode* parameter.
    mode : str
        The mode of the file to be opened.

    Returns
    -------
    fobj : file object
        The file object.
    close : bool
        If *fname* was a string, then *close* will be *True* to signify
        that the file object should be closed after writing to it.
        Otherwise, *close* will be *False* signifying that the user, in
        essence, created the file object already and that subsequent
        operations should not close it.
    """
    if is_string_like(fname):
        fobj = open(fname, mode)
        close = True
    elif hasattr(fname, 'write'):
        # fname is a file-like object, perhaps a StringIO (for example)
        fobj = fname
        close = False
    else:
        # assume it is a file descriptor
        fobj = os.fdopen(fname, mode)
        close = False
    return fobj, close
def graph_from_dot_file(path):
    """Load graph as defined by a DOT file.

    The file is assumed to be in DOT format. It will
    be loaded, parsed and a Dot class will be returned,
    representing the graph.
    """
    # 'with' guarantees the handle is closed even if read() raises;
    # the previous open()/close() pair leaked the descriptor on error.
    with open(path, 'rb') as fd:
        data = fd.read()
    return graph_from_dot_data(data)
def graph_from_edges(edge_list, node_prefix='', directed=False):
    """Creates a basic graph out of an edge list.

    The edge list has to be a list of tuples representing
    the nodes connected by the edge.
    The values can be anything: bool, int, float, str.

    If the graph is undirected by default, it is only
    calculated from one of the symmetric halves of the matrix.
    """
    edges = edge_list if edge_list is not None else []
    graph = Dot(graph_type='digraph' if directed else 'graph')
    for src, dst in edges:
        graph.add_edge(Edge('%s%s' % (node_prefix, src),
                            '%s%s' % (node_prefix, dst)))
    return graph
def graph_from_adjacency_matrix(matrix, node_prefix='', directed=False):
    """Creates a basic graph out of an adjacency matrix.

    The matrix has to be a list of rows of values
    representing an adjacency matrix.
    The values can be anything: bool, int, float, as long
    as they can evaluate to True or False.
    Nodes are named by their 1-based row/column position, prefixed
    with *node_prefix*.
    """
    if directed:
        graph = Dot(graph_type='digraph')
    else:
        graph = Dot(graph_type='graph')
    # enumerate() instead of matrix.index(row): index() returns the
    # FIRST matching row, which silently skips the wrong columns when
    # two rows are identical.
    for node_orig, row in enumerate(matrix, start=1):
        if not directed:
            # Undirected graphs only need one symmetric half.
            skip = node_orig - 1
            r = row[skip:]
        else:
            skip = 0
            r = row
        node_dest = skip + 1
        for e in r:
            if e:
                # '%s%s' formatting: the node numbers are ints, so
                # node_prefix + node_orig would raise TypeError.
                graph.add_edge(
                    Edge(
                        '%s%s' % (node_prefix, node_orig),
                        '%s%s' % (node_prefix, node_dest)))
            node_dest += 1
    return graph
def graph_from_incidence_matrix(matrix, node_prefix='', directed=False):
    """Creates a basic graph out of an incidence matrix.

    The matrix has to be a list of rows of values
    representing an incidence matrix.
    The values can be anything: bool, int, float, as long
    as they can evaluate to True or False.
    Each row describes one edge; negative entries mark the source end
    for directed graphs.
    """
    if directed:
        graph = Dot(graph_type='digraph')
    else:
        graph = Dot(graph_type='graph')
    for row in matrix:
        nodes = []
        c = 1
        for node in row:
            if node:
                # Signed 1-based column index of an endpoint.
                nodes.append(c * node)
            c += 1
        nodes.sort()
        if len(nodes) == 2:
            # '%s%s' formatting: the node ids are ints, so
            # node_prefix + abs(nodes[0]) would raise TypeError.
            graph.add_edge(
                Edge(
                    '%s%s' % (node_prefix, abs(nodes[0])),
                    '%s%s' % (node_prefix, nodes[1])))
    if not directed:
        graph.set_simplify(True)
    return graph
def __find_executables(path):
    """Used by find_graphviz.

    path - single directory as a string

    If any of the executables are found, it will return a dictionary
    containing the program names as keys and their paths as values.
    Otherwise returns None.
    """
    success = False
    progs = {
        "dot": "",
        "twopi": "",
        "neato": "",
        "circo": "",
        "fdp": "",
        "sfdp": "",
    }
    was_quoted = False
    path = path.strip()
    # Accept quoted paths (common on Windows); remember so we can
    # re-quote the discovered executable paths below.
    if path.startswith('"') and path.endswith('"'):
        path = path[1:-1]
        was_quoted = True
    if not os.path.isdir(path):
        return None
    for prg in progs:
        if progs[prg]:
            continue
        prg_path = os.path.join(path, prg)
        prg_exe_path = prg_path + ".exe"
        if os.path.exists(prg_path):
            if was_quoted:
                prg_path = "\"{}\"".format(prg_path)
            progs[prg] = prg_path
            success = True
        elif os.path.exists(prg_exe_path):
            # Windows executables carry a .exe suffix.
            if was_quoted:
                prg_exe_path = "\"{}\"".format(prg_exe_path)
            progs[prg] = prg_exe_path
            success = True
    if success:
        return progs
    return None
def find_graphviz():
    """Locate Graphviz's executables in the system.

    Tries three methods:

    First: Windows Registry (Windows only)
    This requires Mark Hammond's pywin32 is installed.

    Secondly: Search the path
    It will look for 'dot', 'twopi' and 'neato' in all the directories
    specified in the PATH environment variable.

    Thirdly: Default install location (Windows only)
    It will look for 'dot', 'twopi' and 'neato' in the default install
    location under the "Program Files" directory.

    It will return a dictionary containing the program names as keys
    and their paths as values.

    If this fails, it returns None.
    """
    # Method 1 (Windows only)
    if os.sys.platform == 'win32':
        HKEY_LOCAL_MACHINE = 0x80000002
        KEY_QUERY_VALUE = 0x0001
        RegOpenKeyEx = None
        RegQueryValueEx = None
        RegCloseKey = None
        try:
            import win32api
            RegOpenKeyEx = win32api.RegOpenKeyEx
            RegQueryValueEx = win32api.RegQueryValueEx
            RegCloseKey = win32api.RegCloseKey
        except ImportError:
            # Print a messaged suggesting they install these?
            pass
        try:
            # Fall back to raw advapi32 calls through ctypes when
            # pywin32 is not available.
            import ctypes
            def RegOpenKeyEx(key, subkey, opt, sam):
                result = ctypes.c_uint(0)
                ctypes.windll.advapi32.RegOpenKeyExA(key, subkey, opt, sam,
                                                     ctypes.byref(result))
                return result.value
            def RegQueryValueEx(hkey, valuename):
                data_type = ctypes.c_uint(0)
                data_len = ctypes.c_uint(1024)
                data = ctypes.create_string_buffer(1024)
                # this has a return value, which we should probably check
                ctypes.windll.advapi32.RegQueryValueExA(
                    hkey, valuename, 0, ctypes.byref(data_type),
                    data, ctypes.byref(data_len))
                return data.value
            RegCloseKey = ctypes.windll.advapi32.RegCloseKey
        except ImportError:
            # Print a messaged suggesting they install these?
            pass
        if RegOpenKeyEx is not None:
            # Get the GraphViz install path from the registry
            hkey = None
            potentialKeys = [
                "SOFTWARE\\ATT\\Graphviz",
                "SOFTWARE\\AT&T Research Labs\\Graphviz"]
            for potentialKey in potentialKeys:
                try:
                    hkey = RegOpenKeyEx(
                        HKEY_LOCAL_MACHINE,
                        potentialKey, 0, KEY_QUERY_VALUE)
                    if hkey is not None:
                        path = RegQueryValueEx(hkey, "InstallPath")
                        RegCloseKey(hkey)
                        # The regitry variable might exist, left by
                        # old installations but with no value, in those cases
                        # we keep searching...
                        if not path:
                            continue
                        # Now append the "bin" subdirectory:
                        path = os.path.join(path, "bin")
                        progs = __find_executables(path)
                        if progs is not None:
                            return progs
                except Exception:
                    pass
                else:
                    break
    # Method 2 (Linux, Windows etc)
    if 'PATH' in os.environ:
        for path in os.environ['PATH'].split(os.pathsep):
            progs = __find_executables(path)
            if progs is not None:
                return progs
    # Method 3 (Windows only)
    if os.sys.platform == 'win32':
        # Try and work out the equivalent of "C:\Program Files" on this
        # machine (might be on drive D:, or in a different language)
        if 'PROGRAMFILES' in os.environ:
            # Note, we could also use the win32api to get this
            # information, but win32api may not be installed.
            path = os.path.join(os.environ['PROGRAMFILES'], 'ATT',
                                'GraphViz', 'bin')
        else:
            # Just in case, try the default...
            path = r"C:\Program Files\att\Graphviz\bin"
        progs = __find_executables(path)
        if progs is not None:
            return progs
    # Last resort: well-known Unix / macOS install locations.
    for path in (
            '/usr/bin', '/usr/local/bin',
            '/opt/local/bin',
            '/opt/bin', '/sw/bin', '/usr/share',
            '/Applications/Graphviz.app/Contents/MacOS/'):
        progs = __find_executables(path)
        if progs is not None:
            return progs
    # Failed to find GraphViz
    return None
def get_node_list(self):
    """Get the list of Node instances.

    This method returns the list of Node instances
    composing the graph.
    """
    return [
        Node(obj_dict=obj_d)
        for obj_dict_list in self.obj_dict['nodes'].values()
        for obj_d in obj_dict_list
    ]
def del_edge(self, src_or_list, dst=None, index=None):
    """Delete an edge from the graph.

    Given an edge's (source, destination) node names all
    matching edges(s) will be deleted if 'index' is not
    specified or set to None.
    If there are several matching edges and 'index' is
    given, only the edge in that position will be deleted.

    'index' should be an integer specifying the position
    of the edge to delete. If index is larger than the
    number of matching edges, no action is taken.

    If edges are deleted it returns True. If no action
    is taken it returns False.
    """
    if isinstance(src_or_list, (list, tuple)):
        # Called as del_edge((src, dst), index) -- the second
        # positional argument is really the index.
        if dst is not None and isinstance(dst, (int, long)):
            index = dst
        src, dst = src_or_list
    else:
        src, dst = src_or_list, dst
    # Accept Node objects as well as plain names.
    if isinstance(src, Node):
        src = src.get_name()
    if isinstance(dst, Node):
        dst = dst.get_name()
    if (src, dst) in self.obj_dict['edges']:
        if (index is not None and index <
                len(self.obj_dict['edges'][(src, dst)])):
            # Delete just the edge at the requested position.
            del self.obj_dict['edges'][(src, dst)][index]
            return True
        else:
            # Delete every parallel edge between the two nodes.
            del self.obj_dict['edges'][(src, dst)]
            return True
    return False
def get_edge_list(self):
    """Get the list of Edge instances.

    This method returns the list of Edge instances
    composing the graph.
    """
    return [
        Edge(obj_dict=obj_d)
        for obj_dict_list in self.obj_dict['edges'].values()
        for obj_d in obj_dict_list
    ]
def to_string(self):
    """Returns a string representation of the graph in dot language.

    It will return the graph and all its subelements in string form.
    """
    graph = list()
    # 'strict' is only emitted on the top-level graph.
    if self.obj_dict.get('strict', None) is not None:
        if self == self.get_parent_graph() and self.obj_dict['strict']:
            graph.append('strict ')
    if self.obj_dict['name'] == '':
        if ('show_keyword' in self.obj_dict and
                self.obj_dict['show_keyword']):
            graph.append('subgraph {\n')
        else:
            graph.append('{\n')
    else:
        graph.append('%s %s {\n' % (self.obj_dict['type'],
                                    self.obj_dict['name']))
    # Graph-level attributes, in deterministic (sorted) order.
    for attr, value in sorted(self.obj_dict['attributes'].items(),
                              key=itemgetter(0)):
        if value is not None:
            graph.append('%s=%s' % (attr, quote_if_necessary(value)))
        else:
            graph.append(attr)
        graph.append(';\n')
    edges_done = set()
    edge_obj_dicts = list()
    for e in self.obj_dict['edges'].values():
        edge_obj_dicts.extend(e)
    if edge_obj_dicts:
        # Sets of node names appearing as a source or destination;
        # used below for suppress_disconnected.
        edge_src_set, edge_dst_set = list(
            zip(*[obj['points'] for obj in edge_obj_dicts]))
        edge_src_set, edge_dst_set = set(edge_src_set), set(edge_dst_set)
    else:
        edge_src_set, edge_dst_set = set(), set()
    node_obj_dicts = list()
    for e in self.obj_dict['nodes'].values():
        node_obj_dicts.extend(e)
    sgraph_obj_dicts = list()
    for sg in self.obj_dict['subgraphs'].values():
        sgraph_obj_dicts.extend(sg)
    # Interleave nodes, edges and subgraphs in insertion order.
    obj_list = sorted([
        (obj['sequence'], obj)
        for obj
        in (edge_obj_dicts + node_obj_dicts + sgraph_obj_dicts)])
    for _idx, obj in obj_list:
        if obj['type'] == 'node':
            node = Node(obj_dict=obj)
            if self.obj_dict.get('suppress_disconnected', False):
                # Skip nodes that participate in no edge.
                if (node.get_name() not in edge_src_set and
                        node.get_name() not in edge_dst_set):
                    continue
            graph.append(node.to_string() + '\n')
        elif obj['type'] == 'edge':
            edge = Edge(obj_dict=obj)
            # With 'simplify' enabled, emit duplicate edges only once.
            if self.obj_dict.get('simplify', False) and edge in edges_done:
                continue
            graph.append(edge.to_string() + '\n')
            edges_done.add(edge)
        else:
            sgraph = Subgraph(obj_dict=obj)
            graph.append(sgraph.to_string() + '\n')
    graph.append('}\n')
    return ''.join(graph)
def write(self, path, prog=None, format='raw'):
"""Write graph to file in selected format.
Given a filename 'path' it will open/create and truncate
such file and write on it a representation of the graph
defined by the dot object and in the format specified by
'format'. 'path' can also be an open file-like object, such as
a StringIO instance.
The format 'raw' is used to dump the string representation
of the Dot object, without further processing.
The output can be processed by any of graphviz tools, defined
in 'prog', which defaults to 'dot'
Returns True or False according to the success of the write
operation.
There's also the preferred possibility of using:
write_'format'(path, prog='program')
which are automatically defined for all the supported formats.
[write_ps(), write_gif(), write_dia(), ...]
"""
if prog is None:
prog = self.prog
fobj, close = get_fobj(path, 'w+b')
try:
if format == 'raw':
data = self.to_string()
if isinstance(data, basestring):
if not isinstance(data, unicode):
try:
data = unicode(data, 'utf-8')
except Exception:
pass
try:
charset = self.get_charset()
if not PY3 or not charset:
charset = 'utf-8'
data = data.encode(charset)
except Exception:
if PY3:
data = data.encode('utf-8')
pass
fobj.write(data)
else:
fobj.write(self.create(prog, format))
finally:
if close:
fobj.close()
return True | Write graph to file in selected format.
Given a filename 'path' it will open/create and truncate
such file and write on it a representation of the graph
defined by the dot object and in the format specified by
'format'. 'path' can also be an open file-like object, such as
a StringIO instance.
The format 'raw' is used to dump the string representation
of the Dot object, without further processing.
The output can be processed by any of graphviz tools, defined
in 'prog', which defaults to 'dot'
Returns True or False according to the success of the write
operation.
There's also the preferred possibility of using:
write_'format'(path, prog='program')
which are automatically defined for all the supported formats.
[write_ps(), write_gif(), write_dia(), ...] | entailment |
def get_crumbs(self):
"""
Get crumbs for navigation links.
Returns:
tuple:
concatenated list of crumbs using these crumbs and the
crumbs of the parent classes through ``__mro__``.
"""
crumbs = []
for cls in reversed(type(self).__mro__[1:]):
crumbs.extend(getattr(cls, 'crumbs', ()))
crumbs.extend(list(self.crumbs))
return tuple(crumbs) | Get crumbs for navigation links.
Returns:
tuple:
concatenated list of crumbs using these crumbs and the
crumbs of the parent classes through ``__mro__``. | entailment |
def get(self, request, *args, **kwargs):
"""
Django view get function.
Add items of extra_context, crumbs and grid to context.
Args:
request (): Django's request object.
*args (): request args.
**kwargs (): request kwargs.
Returns:
response: render to response with context.
"""
context = self.get_context_data(**kwargs)
context.update(self.extra_context)
context['crumbs'] = self.get_crumbs()
context['title'] = self.title
context['suit'] = 'suit' in settings.INSTALLED_APPS
if context.get('dashboard_grid', None) is None and self.grid:
context['dashboard_grid'] = self.grid
return self.render_to_response(context) | Django view get function.
Add items of extra_context, crumbs and grid to context.
Args:
request (): Django's request object.
*args (): request args.
**kwargs (): request kwargs.
Returns:
response: render to response with context. | entailment |
def realtime(widget, url_name=None, url_regex=None, time_interval=None):
"""
Return a widget as real-time.
Args:
widget (Widget): the widget to register and return as real-time.
url_name (str): the URL name to call to get updated content.
url_regex (regex): the URL regex to be matched.
time_interval (int): the interval of refreshment in milliseconds.
Returns:
Widget: the "real-timed" widget.
"""
if not hasattr(widget, 'get_updated_content'):
raise AttributeError('Widget %s must implement get_updated_content '
'method.' % widget)
elif not callable(widget.get_updated_content):
raise ValueError('get_updated_content in widget %s is not callable'
% widget)
if url_name is None:
if getattr(widget, 'url_name', None) is not None:
url_name = widget.url_name
else:
url_name = widget.__class__.__name__
if url_name in [w.url_name for w in REALTIME_WIDGETS]:
raise ValueError('URL name %s is already used by another '
'real time widget.' % url_name)
if url_regex is None:
if getattr(widget, 'url_regex', None) is not None:
url_regex = widget.url_regex
else:
url_regex = sha256(url_name.encode('utf-8'))
url_regex = url_regex.hexdigest()[:32]
url_regex = 'realtime/' + url_regex
if url_regex in [w.url_regex for w in REALTIME_WIDGETS]:
raise ValueError('URL regex %s is already used by another '
'real time widget.' % url_regex)
if time_interval is None:
if getattr(widget, 'time_interval', None) is not None:
time_interval = widget.time_interval
else:
time_interval = app_settings.default_time_interval
from django.views.generic import View
from braces.views import AjaxResponseMixin, JSONResponseMixin
# pylama:ignore=C0111,R0201
class PartialResponse(JSONResponseMixin, AjaxResponseMixin, View):
def get_data(self):
return widget.get_updated_content()
def get(self, request, *args, **kwargs):
return self.get_ajax(request, *args, **kwargs)
def get_ajax(self, request, *args, **kwargs):
return self.render_json_response(self.get_data())
PartialResponse.url_name = url_name
PartialResponse.url_regex = url_regex
PartialResponse.time_interval = time_interval
REALTIME_WIDGETS.append(PartialResponse)
if not hasattr(widget, 'url_name'):
widget.url_name = url_name
if not hasattr(widget, 'url_regex'):
widget.url_regex = url_regex
if not hasattr(widget, 'time_interval'):
widget.time_interval = time_interval
return widget | Return a widget as real-time.
Args:
widget (Widget): the widget to register and return as real-time.
url_name (str): the URL name to call to get updated content.
url_regex (regex): the URL regex to be matched.
time_interval (int): the interval of refreshment in milliseconds.
Returns:
Widget: the "real-timed" widget. | entailment |
def get_realtime_urls(admin_view_func=lambda x: x):
"""
Get the URL for real-time widgets.
Args:
admin_view_func (callable): an admin_view method from an AdminSite
instance. By default: identity.
Returns:
list: the list of the real-time URLs as django's ``url()``.
"""
from .widgets import REALTIME_WIDGETS
return [url(w.url_regex, admin_view_func(w.as_view()), name=w.url_name)
for w in REALTIME_WIDGETS] | Get the URL for real-time widgets.
Args:
admin_view_func (callable): an admin_view method from an AdminSite
instance. By default: identity.
Returns:
list: the list of the real-time URLs as django's ``url()``. | entailment |
def setup_db(connection_string):
"""
Sets up the database schema and adds defaults.
:param connection_string: Database URL. e.g: sqlite:///filename.db
This is usually taken from the config file.
"""
global DB_Session, engine
new_database = False
if connection_string == 'sqlite://' or not database_exists(connection_string):
new_database = True
engine = create_engine(connection_string, connect_args={'timeout': 20})
entities.Base.metadata.create_all(engine)
DB_Session = sessionmaker(bind=engine)
db_path = os.path.dirname(__file__)
if new_database:
# bootstrapping the db with classifications types.
json_file = open(os.path.join(db_path, 'bootstrap.json'))
data = json.load(json_file)
session = get_session()
session.execute('PRAGMA user_version = {0}'.format(beeswarm.server.db.DATABASE_VERSION))
for entry in data['classifications']:
c = session.query(Classification).filter(Classification.type == entry['type']).first()
if not c:
classification = Classification(type=entry['type'], description_short=entry['description_short'],
description_long=entry['description_long'])
session.add(classification)
else:
c.description_short = entry['description_short']
c.description_long = entry['description_long']
for username in data['bait_users']:
u = session.query(BaitUser).filter(BaitUser.username == username).first()
if not u:
logger.debug('Creating default BaitUser: {}'.format(username))
password = data['bait_users'][username]
bait_user = BaitUser(username=username, password=password)
session.add(bait_user)
session.commit()
else:
result = engine.execute("PRAGMA user_version;")
version = result.fetchone()[0]
result.close()
logger.info('Database is at version {0}.'.format(version))
if version != beeswarm.server.db.DATABASE_VERSION:
logger.error('Incompatible database version detected. This version of Beeswarm is compatible with '
'database version {0}, but {1} was found. Please delete the database, restart the Beeswarm '
'server and reconnect the drones.'.format(beeswarm.server.db.DATABASE_VERSION,
version))
sys.exit(1) | Sets up the database schema and adds defaults.
:param connection_string: Database URL. e.g: sqlite:///filename.db
This is usually taken from the config file. | entailment |
def start_pty_request(self, channel, term, modes):
"""Start a PTY - intended to run it a (green)thread."""
request = self.dummy_request()
request._sock = channel
request.modes = modes
request.term = term
request.username = self.username
# This should block until the user quits the pty
self.pty_handler(request, self.client_address, self.tcp_server, self.vfs, self.session)
# Shutdown the entire session
self.transport.close() | Start a PTY - intended to run it a (green)thread. | entailment |
def check_time(self):
""" Make sure our Honeypot time is consistent, and not too far off
from the actual time. """
poll = self.config['timecheck']['poll']
ntp_poll = self.config['timecheck']['ntp_pool']
while True:
clnt = ntplib.NTPClient()
try:
response = clnt.request(ntp_poll, version=3)
diff = response.offset
if abs(diff) >= 15:
logger.error('Timings found to be far off, shutting down drone ({0})'.format(diff))
sys.exit(1)
else:
logger.debug('Polled ntp server and found that drone has {0} seconds offset.'.format(diff))
except (ntplib.NTPException, _socket.error) as ex:
logger.warning('Error while polling ntp server: {0}'.format(ex))
gevent.sleep(poll * 60 * 60) | Make sure our Honeypot time is consistent, and not too far off
from the actual time. | entailment |
def start(self):
""" Starts services. """
# protocol handlers
for c in handlerbase.HandlerBase.__subclasses__():
cap_name = c.__name__.lower()
if cap_name in self.config['capabilities']:
port = self.config['capabilities'][cap_name]['port']
# carve out the options for this specific service
options = self.config['capabilities'][cap_name]
# capabilities are only allowed to append to the session list
cap = c(options, self.work_dir)
try:
# Convention: All capability names which end in 's' will be wrapped in ssl.
if cap_name.endswith('s'):
server = StreamServer(('0.0.0.0', port), cap.handle_session,
keyfile=self.key, certfile=self.cert)
else:
server = StreamServer(('0.0.0.0', port), cap.handle_session)
logger.debug('Adding {0} capability with options: {1}'.format(cap_name, options))
self._servers.append(server)
server_greenlet = Greenlet(server.start())
self._server_greenlets.append(server_greenlet)
except _socket.error as ex:
logger.error("Could not start {0} server on port {1}. Error: {2}".format(c.__name__, port, ex))
else:
logger.info('Started {0} capability listening on port {1}'.format(c.__name__, port))
stop_if_not_write_workdir(self.work_dir)
logger.info("Honeypot running.")
gevent.joinall(self._server_greenlets) | Starts services. | entailment |
def stop(self):
"""Stops services"""
for s in self._servers:
s.stop()
for g in self._server_greenlets:
g.kill()
logger.info('All workers stopped.') | Stops services | entailment |
def prepare_environment(work_dir):
"""
Performs a few maintenance tasks before the Honeypot is run. Copies the data directory,
and the config file to the cwd. The config file copied here is overwritten if
the __init__ method is called with a configuration URL.
:param work_dir: The directory to copy files to.
"""
package_directory = os.path.dirname(os.path.abspath(beeswarm.__file__))
logger.info('Copying data files to workdir.')
shutil.copytree(os.path.join(package_directory, 'drones/honeypot/data'), os.path.join(work_dir, 'data/'),
ignore=Honeypot._ignore_copy_files) | Performs a few maintenance tasks before the Honeypot is run. Copies the data directory,
and the config file to the cwd. The config file copied here is overwritten if
the __init__ method is called with a configuration URL.
:param work_dir: The directory to copy files to. | entailment |
def start(self):
"""
Launches a new HTTP client session on the server taken from the `self.options` dict.
:param my_ip: IP of this Client itself
"""
username = self.options['username']
password = self.options['password']
server_host = self.options['server']
server_port = self.options['port']
honeypot_id = self.options['honeypot_id']
session = self.create_session(server_host, server_port, honeypot_id)
self.sessions[session.id] = session
logger.debug(
'Sending {0} bait session to {1}:{2}. (bait id: {3})'.format('http', server_host, server_port, session.id))
try:
url = self._make_url(server_host, '/index.html', server_port)
response = self.client.get(url, auth=HTTPBasicAuth(username, password), verify=False)
session.did_connect = True
if response.status_code == 200:
session.add_auth_attempt('plaintext', True, username=username, password=password)
session.did_login = True
else:
session.add_auth_attempt('plaintext', False, username=username, password=password)
links = self._get_links(response)
while self.sent_requests <= self.max_requests and links:
url = random.choice(links)
response = self.client.get(url, auth=HTTPBasicAuth(username, password), verify=False)
links = self._get_links(response)
session.did_complete = True
except Exception as err:
logger.debug('Caught exception: {0} ({1})'.format(err, str(type(err))))
finally:
session.alldone = True
session.end_session()
self.client.close() | Launches a new HTTP client session on the server taken from the `self.options` dict.
:param my_ip: IP of this Client itself | entailment |
def _get_links(self, response):
"""
Parses the response text and returns all the links in it.
:param response: The Response object.
"""
html_text = response.text.encode('utf-8')
doc = document_fromstring(html_text)
links = []
for e in doc.cssselect('a'):
links.append(e.get('href')) | Parses the response text and returns all the links in it.
:param response: The Response object. | entailment |
def bootstrap(server_workdir, drone_workdir):
"""Bootstraps localhost configurations for a Beeswarm server and a honeypot.
:param server_workdir: Output directory for the server configuration file.
:param drone_workdir: Output directory for the drone configuration file.
"""
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)-15s (%(name)s) %(message)s')
console_log = logging.StreamHandler()
console_log.setLevel(logging.INFO)
console_log.setFormatter(formatter)
root_logger.addHandler(console_log)
server_workdir_absolute = os.path.abspath(server_workdir)
old_cwd = os.getcwd()
os.chdir(server_workdir)
server = Server(server_workdir_absolute, None, start_webui=False, customize=False, reset_password=False,
max_sessions=0, server_hostname='127.0.0.1')
logger.info('Server config has been written to {0}'.format(os.path.join(server_workdir, 'beeswarmcfg.json')))
gevent.spawn(server.start, False)
# waiting game to ensure actors has started.
gevent.sleep(2)
os.chdir(old_cwd)
# setting up socket to communicate with ZMQ actor.
context = beeswarm.shared.zmq_context
database_actor = context.socket(zmq.REQ)
database_actor.connect(SocketNames.DATABASE_REQUESTS.value)
db_session = database_setup.get_session()
drone = Honeypot()
protocol_config = (
('ftp', 21, {
'max_attempts': 3,
'banner': 'Microsoft FTP Server',
'syst_type': 'Windows-NT'
}),
('telnet', 23, {
'max_attempts': 3
}),
('pop3', 110, {
'max_attempts': 3
}),
('pop3s', 993, {
'max_attempts': 3
}),
('ssh', 22, {}),
('http', 80, {
'banner': 'Microsoft-IIS/5.0'
}),
('https', 443, {
'banner': 'Microsoft-IIS/5.0'
}),
('smtp', 25, {
'banner': 'Microsoft ESMTP MAIL service ready'
}),
('vnc', 5900, {})
)
for protocol, port, protocol_specific_data in protocol_config:
drone.add_capability(protocol, port, protocol_specific_data)
drone.cert_common_name = '*'
drone.cert_country = 'US'
drone.cert_state = 'None'
drone.cert_locality = 'None'
drone.cert_organization = 'None'
drone.cert_organization_unit = ''
db_session.add(drone)
db_session.commit()
drone_config = send_zmq_request_socket(database_actor, '{0} {1}'.format(Messages.DRONE_CONFIG.value, drone.id))
with open(os.path.join(drone_workdir, 'beeswarmcfg.json'), 'w') as drone_config_file:
drone_config_file.write(json.dumps(drone_config, indent=4))
logger.info('Drone config has been written to {0}'.format(os.path.join(server_workdir, 'beeswarmcfg.json')))
server.stop() | Bootstraps localhost configurations for a Beeswarm server and a honeypot.
:param server_workdir: Output directory for the server configuration file.
:param drone_workdir: Output directory for the drone configuration file. | entailment |
def database_exists(url):
"""Check if a database exists.
:param url: A SQLAlchemy engine URL.
Performs backend-specific testing to quickly determine if a database
exists on the server. ::
database_exists('postgres://postgres@localhost/name') #=> False
create_database('postgres://postgres@localhost/name')
database_exists('postgres://postgres@localhost/name') #=> True
Supports checking against a constructed URL as well. ::
engine = create_engine('postgres://postgres@localhost/name')
database_exists(engine.url) #=> False
create_database(engine.url)
database_exists(engine.url) #=> True
"""
url = copy(make_url(url))
database = url.database
if url.drivername.startswith('postgresql'):
url.database = 'template1'
else:
url.database = None
engine = sa.create_engine(url)
if engine.dialect.name == 'postgresql':
text = "SELECT 1 FROM pg_database WHERE datname='%s'" % database
return bool(engine.execute(text).scalar())
elif engine.dialect.name == 'mysql':
text = ("SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA "
"WHERE SCHEMA_NAME = '%s'" % database)
return bool(engine.execute(text).scalar())
elif engine.dialect.name == 'sqlite':
return database == ':memory:' or os.path.exists(database)
else:
text = 'SELECT 1'
try:
url.database = database
engine = sa.create_engine(url)
engine.execute(text)
return True
except (ProgrammingError, OperationalError):
return False | Check if a database exists.
:param url: A SQLAlchemy engine URL.
Performs backend-specific testing to quickly determine if a database
exists on the server. ::
database_exists('postgres://postgres@localhost/name') #=> False
create_database('postgres://postgres@localhost/name')
database_exists('postgres://postgres@localhost/name') #=> True
Supports checking against a constructed URL as well. ::
engine = create_engine('postgres://postgres@localhost/name')
database_exists(engine.url) #=> False
create_database(engine.url)
database_exists(engine.url) #=> True | entailment |
def message_proxy(self, work_dir):
"""
drone_data_inboud is for data comming from drones
drone_data_outbound is for commands to the drones, topic must either be a drone ID or all for sending
a broadcast message to all drones
"""
public_keys_dir = os.path.join(work_dir, 'certificates', 'public_keys')
secret_keys_dir = os.path.join(work_dir, 'certificates', 'private_keys')
# start and configure auth worker
auth = IOLoopAuthenticator()
auth.start()
auth.allow('127.0.0.1')
auth.configure_curve(domain='*', location=public_keys_dir)
# external interfaces for communicating with drones
server_secret_file = os.path.join(secret_keys_dir, 'beeswarm_server.pri')
server_public, server_secret = load_certificate(server_secret_file)
drone_data_inbound = beeswarm.shared.zmq_context.socket(zmq.PULL)
drone_data_inbound.curve_secretkey = server_secret
drone_data_inbound.curve_publickey = server_public
drone_data_inbound.curve_server = True
drone_data_inbound.bind('tcp://*:{0}'.format(self.config['network']['zmq_port']))
drone_data_outbound = beeswarm.shared.zmq_context.socket(zmq.PUB)
drone_data_outbound.curve_secretkey = server_secret
drone_data_outbound.curve_publickey = server_public
drone_data_outbound.curve_server = True
drone_data_outbound.bind('tcp://*:{0}'.format(self.config['network']['zmq_command_port']))
# internal interfaces
# all inbound session data from drones will be replayed on this socket
drone_data_socket = beeswarm.shared.zmq_context.socket(zmq.PUB)
drone_data_socket.bind(SocketNames.DRONE_DATA.value)
# all commands received on this will be published on the external interface
drone_command_socket = beeswarm.shared.zmq_context.socket(zmq.PULL)
drone_command_socket.bind(SocketNames.DRONE_COMMANDS.value)
poller = zmq.Poller()
poller.register(drone_data_inbound, zmq.POLLIN)
poller.register(drone_command_socket, zmq.POLLIN)
while True:
# .recv() gives no context switch - why not? using poller with timeout instead
socks = dict(poller.poll(100))
gevent.sleep()
if drone_command_socket in socks and socks[drone_command_socket] == zmq.POLLIN:
data = drone_command_socket.recv()
drone_id, _ = data.split(' ', 1)
logger.debug("Sending drone command to: {0}".format(drone_id))
# pub socket takes care of filtering
drone_data_outbound.send(data)
elif drone_data_inbound in socks and socks[drone_data_inbound] == zmq.POLLIN:
raw_msg = drone_data_inbound.recv()
split_data = raw_msg.split(' ', 2)
if len(split_data) == 3:
topic, drone_id, data = split_data
else:
data = None
topic, drone_id, = split_data
logger.debug("Received {0} message from {1}.".format(topic, drone_id))
# relay message on internal socket
drone_data_socket.send(raw_msg) | drone_data_inboud is for data comming from drones
drone_data_outbound is for commands to the drones, topic must either be a drone ID or all for sending
a broadcast message to all drones | entailment |
def start(self):
"""
Starts the BeeSwarm server.
"""
self.started = True
if self.app:
web_port = self.config['network']['web_port']
logger.info('Starting server listening on port {0}'.format(web_port))
key_file = os.path.join(self.work_dir, 'server.key')
cert_file = os.path.join(self.work_dir, 'server.crt')
http_server = WSGIServer(('', web_port), self.app, keyfile=key_file, certfile=cert_file)
http_server_greenlet = gevent.spawn(http_server.serve_forever)
self.greenlets.append(http_server_greenlet)
stop_if_not_write_workdir(self.work_dir)
logger.info('Server started.')
gevent.joinall(self.greenlets) | Starts the BeeSwarm server. | entailment |
def get_config(self, configfile):
"""
Loads the configuration from the JSON file, and returns it.
:param configfile: Path to the configuration file
"""
with open(configfile) as config_file:
config = json.load(config_file)
return config | Loads the configuration from the JSON file, and returns it.
:param configfile: Path to the configuration file | entailment |
def time_in_range(self):
"""Return true if current time is in the active range"""
curr = datetime.datetime.now().time()
if self.start_time <= self.end_time:
return self.start_time <= curr <= self.end_time
else:
return self.start_time <= curr or curr <= self.end_time | Return true if current time is in the active range | entailment |
def start(self):
"""
Launches a new Telnet client session on the server taken from the `self.options` dict.
This session always fails.
:param my_ip: IP of this Client itself
"""
password = self.options['password']
server_host = self.options['server']
server_port = self.options['port']
honeypot_id = self.options['honeypot_id']
session = self.create_session(server_host, server_port, honeypot_id)
self.sessions[session.id] = session
logger.debug(
'Sending {0} bait session to {1}:{2}. (bait id: {3})'.format('vnc', server_host, server_port, session.id))
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
client_socket.connect((server_host, int(server_port)))
session.source_port = client_socket.getsockname()[1]
except socket.error as e:
logger.debug('Caught exception: {0} ({1})'.format(e, str(type(e))))
else:
session.did_connect = True
protocol_version = client_socket.recv(1024)
client_socket.send(RFB_VERSION)
supported_auth_methods = client_socket.recv(1024)
# \x02 implies that VNC authentication method is to be used
# Refer to http://tools.ietf.org/html/rfc6143#section-7.1.2 for more info.
if '\x02' in supported_auth_methods:
client_socket.send(VNC_AUTH)
challenge = client_socket.recv(1024)
# password limit for vnc in 8 chars
aligned_password = (password + '\0' * 8)[:8]
des = RFBDes(aligned_password)
response = des.encrypt(challenge)
client_socket.send(response)
auth_status = client_socket.recv(1024)
if auth_status == AUTH_SUCCESSFUL:
session.add_auth_attempt('des_challenge', True, password=aligned_password)
session.did_login = True
else:
session.add_auth_attempt('des_challenge', False, password=aligned_password)
session.did_login = False
session.did_complete = True
finally:
session.alldone = True
session.end_session()
if client_socket:
client_socket.close() | Launches a new Telnet client session on the server taken from the `self.options` dict.
This session always fails.
:param my_ip: IP of this Client itself | entailment |
def create_session(self, server_host, server_port, honeypot_id):
"""
Creates a new session.
:param server_host: IP address of the server
:param server_port: Server port
:return: A new `BaitSession` object.
"""
protocol = self.__class__.__name__.lower()
session = BaitSession(protocol, server_host, server_port, honeypot_id)
self.sessions[session.id] = session
return session | Creates a new session.
:param server_host: IP address of the server
:param server_port: Server port
:return: A new `BaitSession` object. | entailment |
def start(self):
"""
Launches a new FTP client session on the server taken from the `self.options` dict.
:param my_ip: IP of this Client itself
"""
username = self.options['username']
password = self.options['password']
server_host = self.options['server']
server_port = self.options['port']
honeypot_id = self.options['honeypot_id']
command_limit = random.randint(6, 11)
session = self.create_session(server_host, server_port, honeypot_id)
self.sessions[session.id] = session
logger.debug(
'Sending {0} bait session to {1}:{2}. (bait id: {3})'.format('ftp', server_host, server_port, session.id))
self.file_list = []
try:
self.connect()
session.did_connect = True
# TODO: Catch login failure
self.login(username, password)
session.add_auth_attempt('plaintext', True, username=username, password=password)
session.did_login = True
session.timestamp = datetime.utcnow()
except ftplib.error_perm as err:
logger.debug('Caught exception: {0} ({1})'.format(err, str(type(err))))
except socket.error as err:
logger.debug('Error while communicating: {0} ({1})'.format(err, str(type(err))))
else:
command_count = 0
while command_count <= command_limit:
command_count += 1
try:
self.sense()
cmd, param = self.decide()
self.act(cmd, param)
gevent.sleep(random.uniform(0, 3))
except IndexError: # This means we hit an empty folder, or a folder with only files.
continue
session.did_complete = True
finally:
if self.client.sock is not None:
# will close socket
self.client.quit()
session.alldone = True
session.end_session() | Launches a new FTP client session on the server taken from the `self.options` dict.
:param my_ip: IP of this Client itself | entailment |
def sense(self):
"""
Launches a few "sensing" commands such as 'ls', or 'pwd'
and updates the current bait state.
"""
cmd_name = random.choice(self.senses)
command = getattr(self, cmd_name)
self.state['last_command'] = cmd_name
command() | Launches a few "sensing" commands such as 'ls', or 'pwd'
and updates the current bait state. | entailment |
def decide(self):
"""
Decides the next command to be launched based on the current state.
:return: Tuple containing the next command name, and it's parameters.
"""
next_command_name = random.choice(self.COMMAND_MAP[self.state['last_command']])
param = ''
if next_command_name == 'retrieve':
param = random.choice(self.state['file_list'])
elif next_command_name == 'cwd':
param = random.choice(self.state['dir_list'])
return next_command_name, param | Decides the next command to be launched based on the current state.
:return: Tuple containing the next command name, and it's parameters. | entailment |
def act(self, cmd_name, param):
"""
Run the command with the parameters.
:param cmd_name: The name of command to run
:param param: Params for the command
"""
command = getattr(self, cmd_name)
if param:
command(param)
else:
command() | Run the command with the parameters.
:param cmd_name: The name of command to run
:param param: Params for the command | entailment |
def list(self):
"""
Run the FTP LIST command, and update the state.
"""
logger.debug('Sending FTP list command.')
self.state['file_list'] = []
self.state['dir_list'] = []
self.client.retrlines('LIST', self._process_list) | Run the FTP LIST command, and update the state. | entailment |
def retrieve(self, filename):
"""
Run the FTP RETR command, and download the file
:param filename: Name of the file to download
"""
logger.debug('Sending FTP retr command. Filename: {}'.format(filename))
self.client.retrbinary('RETR {}'.format(filename), self._save_file) | Run the FTP RETR command, and download the file
:param filename: Name of the file to download | entailment |
def cwd(self, newdir):
"""
Send the FTP CWD command
:param newdir: Directory to change to
"""
logger.debug('Sending FTP cwd command. New Workding Directory: {}'.format(newdir))
self.client.cwd(newdir)
self.state['current_dir'] = self.client.pwd() | Send the FTP CWD command
:param newdir: Directory to change to | entailment |
def _process_list(self, list_line):
# -rw-r--r-- 1 ftp ftp 68 May 09 19:37 testftp.txt
"""
Processes a line of 'ls -l' output, and updates state accordingly.
:param list_line: Line to process
"""
res = list_line.split(' ', 8)
if res[0].startswith('-'):
self.state['file_list'].append(res[-1])
if res[0].startswith('d'):
self.state['dir_list'].append(res[-1]) | Processes a line of 'ls -l' output, and updates state accordingly.
:param list_line: Line to process | entailment |
def validate_time_range(form, field):
""" Makes sure the form data is in 'hh:mm - hh:mm' format and the start time is less than end time."""
string = field.data
try:
begin, end = string.split('-')
begin = begin.strip()
end = end.strip()
begin_hours, begin_min = begin.split(':')
end_hours, end_min = end.split(':')
assert 0 <= int(begin_hours) <= 23
assert 0 <= int(end_hours) <= 23
assert 0 <= int(begin_min) <= 59
assert 0 <= int(end_min) <= 59
assert begin_hours <= end_hours
if begin_hours == end_hours:
assert begin_min < end_min
except (ValueError, AssertionError):
raise ValidationError('Make sure the time is in correct format: "hh:mm - hh:mm"') | Makes sure the form data is in 'hh:mm - hh:mm' format and the start time is less than end time. | entailment |
def start(self):
"""
Starts sending client bait to the configured Honeypot.
"""
logger.info('Starting client.')
self.dispatcher_greenlets = []
for _, entry in self.config['baits'].items():
for b in clientbase.ClientBase.__subclasses__():
bait_name = b.__name__.lower()
# if the bait has a entry in the config we consider the bait enabled
if bait_name in entry:
bait_options = entry[bait_name]
dispatcher = BaitDispatcher(b, bait_options)
dispatcher.start()
self.dispatcher_greenlets.append(dispatcher)
logger.info('Adding {0} bait'.format(bait_name))
logger.debug('Bait added with options: {0}'.format(bait_options))
gevent.joinall(self.dispatcher_greenlets) | Starts sending client bait to the configured Honeypot. | entailment |
def start(self):
    """Run a single POP3 bait session against the configured honeypot.

    Connects over SSL, logs in with the configured credentials, then
    lists, retrieves and deletes every message before quitting. Session
    state is recorded on the bait session object throughout.
    """
    username = self.options['username']
    password = self.options['password']
    server_host = self.options['server']
    server_port = self.options['port']
    session = self.create_session(server_host, server_port, self.options['honeypot_id'])
    try:
        logger.debug(
            'Sending {0} bait session to {1}:{2}. (bait id: {3})'.format('pop3', server_host, server_port,
                                                                         session.id))
        conn = poplib.POP3_SSL(server_host, server_port)
        session.source_port = conn.sock.getsockname()[1]
        session.protocol_data['banner'] = conn.getwelcome()
        session.did_connect = True
        conn.user(username)
        conn.pass_(password)
        # TODO: Handle failed login
        session.add_auth_attempt('plaintext', True, username=username, password=password)
        session.did_login = True
        session.timestamp = datetime.utcnow()
    except Exception as err:
        logger.debug('Caught exception: {0} ({1})'.format(err, str(type(err))))
    else:
        # Drain the mailbox: fetch and delete every message.
        list_entries = conn.list()[1]
        for entry in list_entries:
            index, _ = entry.split(' ')
            conn.retr(index)
            conn.dele(index)
        logger.debug('Found and deleted {0} messages on {1}'.format(len(list_entries), server_host))
        conn.quit()
        session.did_complete = True
    finally:
        session.alldone = True
        session.end_session()
def get_matching_session(self, session, db_session, timediff=5):
    """Find the counterpart of a session, if one exists.

    A bait session is matched against honeypot sessions (and vice versa)
    by protocol, honeypot, a timestamp window and identical
    authentication attempts.

    :param session: session object used as the base of the query.
    :param db_session: SQLAlchemy session used for querying.
    :param timediff: +/- allowed time difference (seconds) between a
                     session and a potential match.
    :return: the matching session, or None.
    """
    window_start = session.timestamp - timedelta(seconds=timediff)
    window_end = session.timestamp + timedelta(seconds=timediff)
    match = None
    pending = db_session.query(Classification).filter(
        Classification.type == 'pending').one()
    # All sessions that share the basic properties of the one under test.
    candidates = db_session.query(Session).options(joinedload(Session.authentication)) \
        .filter(Session.protocol == session.protocol) \
        .filter(Session.honeypot == session.honeypot) \
        .filter(Session.timestamp >= window_start) \
        .filter(Session.timestamp <= window_end) \
        .filter(Session.id != session.id) \
        .filter(Session.classification == pending)
    # Identify the correct session by comparing authentication attempts;
    # this could probably also be done with some fancy ORM/SQL construct.
    for candidate in candidates:
        if candidate.discriminator == session.discriminator:
            continue
        assert candidate.id != session.id
        for honey_auth in session.authentication:
            for candidate_auth in candidate.authentication:
                if candidate_auth.username == honey_auth.username and \
                   candidate_auth.password == honey_auth.password and \
                   candidate_auth.successful == honey_auth.successful:
                    assert candidate.id != session.id
                    match = candidate
                    break
    return match
def _classify_malicious_sessions(self):
    """Classify all pending sessions older than the configured delay.

    Completed bait sessions still pending are classified as MITM.
    Honeypot sessions are classified as credentials reuse, probe or
    bruteforce depending on their authentication attempts. No session
    newer than (now - self.delay_seconds) is touched.
    """
    cutoff = datetime.utcnow() - timedelta(seconds=self.delay_seconds)
    db_session = database_setup.get_session()
    # Find and process bait sessions that did not get classified during
    # persistence.
    pending_baits = db_session.query(BaitSession).options(joinedload(BaitSession.authentication)) \
        .filter(BaitSession.classification_id == 'pending') \
        .filter(BaitSession.did_complete == True) \
        .filter(BaitSession.received < cutoff).all()
    for bait_session in pending_baits:
        logger.debug(
            'Classifying bait session with id {0} as MITM'.format(bait_session.id))
        bait_session.classification = db_session.query(
            Classification).filter(Classification.type == 'mitm').one()
        db_session.commit()
    # Find and process honeypot sessions that did not get classified
    # during persistence.
    pending_sessions = db_session.query(Session, Drone.name).filter(Session.discriminator == None) \
        .filter(Session.timestamp <= cutoff) \
        .filter(Session.classification_id == 'pending') \
        .all()
    for session, drone_name in pending_sessions:
        # Check whether the attack used credentials leaked by beeswarm drones.
        bait_match = None
        for auth in session.authentication:
            bait_match = db_session.query(BaitSession) \
                .filter(BaitSession.authentication.any(username=auth.username, password=auth.password)).first()
            if bait_match:
                break
        if bait_match:
            logger.debug('Classifying session with id {0} as attack which involved the reuse '
                         'of previously transmitted credentials.'.format(session.id))
            session.classification = db_session.query(Classification).filter(
                Classification.type == 'credentials_reuse').one()
        elif len(session.authentication) == 0:
            logger.debug(
                'Classifying session with id {0} as probe.'.format(session.id))
            session.classification = db_session.query(
                Classification).filter(Classification.type == 'probe').one()
        else:
            # We have never transmitted this username/password combination.
            logger.debug(
                'Classifying session with id {0} as bruteforce attempt.'.format(session.id))
            session.classification = db_session.query(Classification).filter(
                Classification.type == 'bruteforce').one()
        db_session.commit()
        session.name = drone_name
        self.processedSessionsPublisher.send(
            '{0} {1}'.format(Messages.SESSION.value, json.dumps(session.to_dict())))
def start(self):
    """Run a single SSH bait session against the configured honeypot.

    Connects, logs in, then issues a random number (6-11) of shell
    commands before logging out. Session state is recorded throughout.
    """
    username = self.options['username']
    password = self.options['password']
    server_host = self.options['server']
    server_port = self.options['port']
    session = self.create_session(server_host, server_port, self.options['honeypot_id'])
    self.sessions[session.id] = session
    logger.debug(
        'Sending ssh bait session to {0}:{1}. (bait id: {2})'.format(server_host, server_port, session.id))
    try:
        self.connect_login()
        session.did_connect = True
        # TODO: Handle failed login
        session.add_auth_attempt('plaintext', True, username=username, password=password)
        session.did_login = True
    except (SSHException, AuthenticationFailed) as err:
        logger.debug('Caught exception: {0} ({1})'.format(err, str(type(err))))
    else:
        # Sense the environment, decide on a command, act on it -- with a
        # human-ish pause between commands.
        for _ in range(random.randint(6, 11)):
            self.sense()
            comm, param = self.decide()
            self.act(comm, param)
            gevent.sleep(random.uniform(0.4, 5.6))
        self.logout()
        session.did_complete = True
    finally:
        session.alldone = True
        session.end_session()
        self.comm_chan.close()
def send_command(self, cmd):
    """Send a single command line to the remote SSH server.

    :param cmd: the command to send (without trailing newline).
    """
    logger.debug('Sending {0} command.'.format(cmd))
    self.comm_chan.sendall(cmd + '\n')
def get_response(self):
    """Receive data from the server once it becomes available.

    Note: this may not return the full response.

    :return: up to 2048 bytes of response data.
    """
    # Poll until the channel has data ready.
    while not self.comm_chan.recv_ready():
        time.sleep(0.5)
    return self.comm_chan.recv(2048)
def connect_login(self):
    """Connect and log in to the remote SSH server.

    :return: response text received after a successful login.
    :raise: AuthenticationFailed when no shell prompt is detected.
    """
    self.client.connect(self.options['server'], self.options['port'], self.options['username'],
                        self.options['password'])
    self.comm_chan = self.client.invoke_shell()
    # Let the server take some time to get ready.
    time.sleep(1)
    while not self.comm_chan.recv_ready():
        time.sleep(0.5)
    login_response = self.comm_chan.recv(2048)
    # A shell prompt ending in '$ ' is our signal that login succeeded.
    if not login_response.endswith('$ '):
        raise AuthenticationFailed
    return login_response
def list2dict(list_of_options):
    """Transform a list of 2-element tuples into a dictionary.

    :param list_of_options: iterable of (key, value) pairs.
    :return: dict mapping each key to its value.
    """
    return {key: value for key, value in list_of_options}
def path_to_ls(fn):
    """Convert an absolute path to an entry resembling the output of
    the ls command on most UNIX systems.

    The directory flag and modification time are computed once instead of
    being recomputed on every iteration of the permission-bit loop.

    :param fn: path of the file or directory to describe.
    :return: single 'ls -l' style line for the path.
    """
    st = os.stat(fn)
    full_mode = 'rwxrwxrwx'
    mode = ''
    for i in range(9):
        # Incrementally build the 9 character permission string, using
        # characters from full_mode and mode bits from stat().
        mode += full_mode[i] if (st.st_mode >> (8 - i)) & 1 else '-'
    d = 'd' if os.path.isdir(fn) else '-'
    file_time = time.strftime(' %b %d %H:%M ', time.gmtime(st.st_mtime))
    return '{0}{1} 1 ftp ftp {2}\t{3}{4}'.format(d, mode, str(st.st_size), file_time, os.path.basename(fn))
def start(self):
    """Start services.

    Loads the ZMQ CURVE certificates, spawns the greenlets that handle
    outgoing and incoming server communication, then blocks waiting for
    the detailed configuration from the Beeswarm server.
    """
    cert_path = os.path.join(self.work_dir, 'certificates')
    public_keys_dir = os.path.join(cert_path, 'public_keys')
    private_keys_dir = os.path.join(cert_path, 'private_keys')
    client_public, client_secret = zmq.auth.load_certificate(
        os.path.join(private_keys_dir, "client.key"))
    server_public, _ = zmq.auth.load_certificate(
        os.path.join(public_keys_dir, "server.key"))
    self.outgoing_msg_greenlet = gevent.spawn(self.outgoing_server_comms, server_public,
                                              client_public, client_secret)
    self.outgoing_msg_greenlet.link_exception(self.on_exception)
    self.incoming_msg_greenlet = gevent.spawn(self.incoming_server_comms, server_public,
                                              client_public, client_secret)
    self.incoming_msg_greenlet.link_exception(self.on_exception)
    logger.info('Waiting for detailed configuration from Beeswarm server.')
    gevent.joinall([self.outgoing_msg_greenlet])
def _start_drone(self):
    """(Re)start the drone using the mode from the on-disk configuration.

    Reads the JSON config file and, when a mode ('honeypot' or 'client')
    is configured, instantiates it and spawns its greenlet.
    """
    with open(self.config_file, 'r') as config_file:
        self.config = json.load(config_file, object_hook=asciify)
    configured_mode = self.config['general']['mode']
    mode = None
    if configured_mode == '' or configured_mode is None:
        logger.info('Drone has not been configured, awaiting configuration from Beeswarm server.')
    elif configured_mode == 'honeypot':
        mode = Honeypot
    elif configured_mode == 'client':
        mode = Client
    if mode:
        self.drone = mode(self.work_dir, self.config)
        self.drone_greenlet = gevent.spawn(self.drone.start)
        self.drone_greenlet.link_exception(self.on_exception)
        logger.info('Drone configured and running. ({0})'.format(self.id))
def stop(self):
    """Stop services and tear down the running drone, if any."""
    logging.debug('Stopping drone, hang on.')
    if self.drone is not None:
        self.drone_greenlet.unlink(self.on_exception)
        self.drone.stop()
        self.drone_greenlet.kill()
        self.drone = None
        # Just some time for the drone to power down, to be nice.
        gevent.sleep(2)
    if self.drone_greenlet is not None:
        self.drone_greenlet.kill(timeout=5)
def start(self):
    """Run a single SMTP bait session against the configured honeypot.

    Connects, logs in, then sends mails picked from the mbox archive
    until self.max_mails is exceeded. Session state is recorded on the
    bait session object throughout.
    """
    username = self.options['username']
    password = self.options['password']
    server_host = self.options['server']
    server_port = self.options['port']
    session = self.create_session(server_host, server_port, self.options['honeypot_id'])
    logger.debug(
        'Sending {0} bait session to {1}:{2}. (bait id: {3})'.format('smtp', server_host, server_port, session.id))
    try:
        self.connect()
        session.did_connect = True
        session.source_port = self.client.sock.getsockname()[1]
        self.login(username, password)
        # TODO: Handle failed login
        # TODO: password='' is sillly fix, this needs to be fixed server side...
        session.add_auth_attempt('plaintext', True, username=username, password='')
        session.did_login = True
    except smtplib.SMTPException as error:
        logger.debug('Caught exception: {0} ({1})'.format(error, str(type(error))))
    else:
        while self.sent_mails <= self.max_mails:
            from_addr, to_addr, mail_body = self.get_one_mail()
            try:
                # Skip malformed entries (missing addresses or multipart body).
                if not (from_addr and to_addr and isinstance(mail_body, str)):
                    continue
                self.client.sendmail(from_addr, to_addr, mail_body)
            except TypeError:
                logger.debug('Malformed email in mbox archive, skipping.')
                continue
            else:
                self.sent_mails += 1
                logger.debug('Sent mail from ({0}) to ({1})'.format(from_addr, to_addr))
            time.sleep(1)
        self.client.quit()
        session.did_complete = True
    finally:
        logger.debug('SMTP Session complete.')
        session.alldone = True
        session.end_session()
        self.client.close()
def get_one_mail(self):
    """Choose and return a random email from the mail archive.

    :return: tuple of (from address, to address, mail body).
    """
    # list() makes random.choice work on Python 3 dict views as well;
    # in Python 2 this is equivalent to mailbox.keys().
    keys = list(self.mailbox.keys())
    while True:
        mail = self.mailbox[random.choice(keys)]
        from_addr = mail.get_from()
        to_addr = mail['To']
        # Retry until we find a mail that carries both addresses.
        if not from_addr or not to_addr:
            continue
        return from_addr, to_addr, mail.get_payload()
def connect(self):
    """Connect to the SMTP server configured in self.options."""
    # TODO: local_hostname should be configurable
    host = self.options['server']
    port = self.options['port']
    self.client = smtplib.SMTP(host, port, local_hostname='local.domain', timeout=15)
def handle(self):
    """The actual service to which the user has connected.

    Runs the read-dispatch loop: authenticate, greet, then read commands
    and dispatch them through self.COMMANDS until the shell is stopped.
    """
    if not self.authentication_ok():
        return
    if self.DOECHO:
        self.writeline(self.WELCOME)
    self.session_start()
    while self.RUNSHELL:
        line = self.readline(prompt=self.PROMPT).strip('\r\n')
        if not line:
            continue
        self.session.transcript_incoming(line)
        self.input = self.input_reader(self, line)
        self.raw_input = self.input.raw
        if not self.input.cmd:
            continue
        # TODO: Command should not be converted to upper case,
        # it looks funny in error messages.
        cmd = self.input.cmd.upper()
        params = self.input.params
        if cmd not in self.COMMANDS:
            self.writeline('-bash: {0}: command not found'.format(cmd))
            logger.error("Unknown command '{0}'".format(cmd))
            continue
        try:
            self.COMMANDS[cmd](params)
        except:
            # Catch-all is deliberate: any handler failure is logged and
            # passed to handleException, which decides whether to stop.
            logger.exception('Error calling {0}.'.format(cmd))
            (t, p, tb) = sys.exc_info()
            if self.handleException(t, p, tb):
                break
    logger.debug("Exiting handler")
def _asciify_list(data):
    """Ascii-fy list values, recursing into nested lists and dicts.

    Unicode items have their accents stripped and are encoded to UTF-8
    byte strings (Python 2 semantics).
    """
    result = []
    for item in data:
        if isinstance(item, unicode):
            item = _remove_accents(item).encode('utf-8')
        elif isinstance(item, list):
            item = _asciify_list(item)
        elif isinstance(item, dict):
            item = _asciify_dict(item)
        result.append(item)
    return result
def _asciify_dict(data):
    """Ascii-fy dict keys and values, recursing into nested containers.

    Unicode keys and values have their accents stripped and are encoded
    to UTF-8 byte strings (Python 2 semantics).
    """
    result = {}
    for key, value in data.iteritems():
        if isinstance(key, unicode):
            key = _remove_accents(key).encode('utf-8')
        if isinstance(value, unicode):
            value = _remove_accents(value).encode('utf-8')
        elif isinstance(value, list):
            value = _asciify_list(value)
        elif isinstance(value, dict):
            value = _asciify_dict(value)
        result[key] = value
    return result
def write_human(self, buffer_):
    """Send buffer_ one character at a time, emulating human typing speed.

    :param buffer_: data to send; IAC bytes are doubled for telnet escaping.
    """
    if self.IAC in buffer_:
        buffer_ = buffer_.replace(self.IAC, self.IAC + self.IAC)
    self.msg("send %r", buffer_)
    for char in buffer_:
        self.sock.sendall(char)
        # Gaussian delay around 80 ms per keystroke.
        time.sleep(random.gauss(80, 20) / 1000.0)
def start(self):
    """Run a single Telnet bait session against the configured honeypot.

    Connects, logs in, then issues a random number (6-11) of shell
    commands before logging out. Session state is recorded throughout.
    """
    username = self.options['username']
    password = self.options['password']
    server_host = self.options['server']
    server_port = self.options['port']
    command_limit = random.randint(6, 11)
    session = self.create_session(server_host, server_port, self.options['honeypot_id'])
    self.sessions[session.id] = session
    logger.debug(
        'Sending telnet bait session to {0}:{1}. (bait id: {2})'.format(server_host, server_port, session.id))
    try:
        self.connect()
        self.login(username, password)
        session.add_auth_attempt('plaintext', True, username=username, password=password)
        session.did_connect = True
        session.source_port = self.client.sock.getsockname()[1]
        session.did_login = True
    except InvalidLogin:
        logger.debug('Telnet session could not login. ({0})'.format(session.id))
        session.did_login = False
    except Exception as err:
        logger.debug('Caught exception: {0} {1}'.format(err, str(err), exc_info=True))
    else:
        # Sense the environment, decide on a command, act on it -- with a
        # human-ish pause between commands.
        for _ in range(command_limit):
            self.sense()
            comm, param = self.decide()
            self.act(comm, param)
            gevent.sleep(random.uniform(0.4, 5.6))
        self.act('logout')
        session.did_complete = True
    finally:
        session.alldone = True
        session.end_session()
        if self.client:
            self.client.close()
def connect(self):
    """Open a new telnet session to the remote server."""
    server = self.options['server']
    port = self.options['port']
    self.client = BaitTelnetClient(server, port)
    self.client.set_option_negotiation_callback(self.process_options)
def login(self, login, password):
    """Log in to the remote telnet server.

    :param login: username to use for logging in.
    :param password: password to use for logging in.
    :raise: InvalidLogin on failed login.
    """
    client = self.client
    client.read_until('Username: ')
    client.write(login + '\r\n')
    client.read_until('Password: ')
    client.write(password + '\r\n')
    # A shell prompt ending in '$ ' within 10 seconds means success.
    if not client.read_until('$ ', 10).endswith('$ '):
        raise InvalidLogin
def logout(self):
    """Log out from the remote server and close the connection."""
    client = self.client
    client.write('exit\r\n')
    client.read_all()
    client.close()
def add_auth_attempt(self, auth_type, successful, **kwargs):
    """Record a single authentication attempt on this session.

    :param auth_type: authentication mechanism; possible values include
                      'plaintext' (plaintext username/password).
    :param successful: whether the attempt succeeded.
    :param kwargs: extra attributes (username, password, challenge,
                   response, ...) stored on the attempt entry.
    """
    entry = {'timestamp': datetime.utcnow(),
             'auth': auth_type,
             'id': uuid.uuid4(),
             'successful': successful}
    # .items() instead of .iteritems() works on Python 2 and 3 alike; the
    # old code also built a log string it never used -- dropped.
    for key, value in kwargs.items():
        if key in ('challenge', 'response'):
            # Binary challenge/response data is stored in printable form.
            entry[key] = repr(value)
        else:
            entry[key] = value
    self.login_attempts.append(entry)
def sense(self):
    """Run one random command from the 'senses' list, updating state."""
    cmd_name = random.choice(self.senses)
    param = ''
    if cmd_name == 'ls':
        if random.randint(0, 1):
            param = '-l'
    elif cmd_name == 'uname':
        # Pick a random contiguous slice of the predefined option letters.
        opts = 'asnrvmpio'
        start = random.randint(0, len(opts) - 2)
        end = random.randint(start + 1, len(opts) - 1)
        param = '-{}'.format(opts[start:end])
    getattr(self, cmd_name)(param)
def decide(self):
    """Choose the next command and its parameters based on current state.

    :return: tuple of (command name, parameter string).
    """
    next_command_name = random.choice(self.COMMAND_MAP[self.state['last_command']])
    param = ''
    if next_command_name == 'cd':
        try:
            param = random.choice(self.state['dir_list'])
        except IndexError:
            # No directories seen yet; fall back to a plain listing.
            next_command_name = 'ls'
    elif next_command_name == 'uname':
        # Random contiguous slice of the predefined option letters.
        opts = 'asnrvmpio'
        start = random.randint(0, len(opts) - 2)
        end = random.randint(start + 1, len(opts) - 1)
        param = '-{}'.format(opts[start:end])
    elif next_command_name == 'ls':
        if random.randint(0, 1):
            param = '-l'
    elif next_command_name == 'cat':
        try:
            param = random.choice(self.state['file_list'])
        except IndexError:
            # No known files; make up a short random name.
            param = ''.join(random.choice(string.lowercase) for _ in range(3))
    elif next_command_name == 'echo':
        param = random.choice([
            '$http_proxy',
            '$https_proxy',
            '$ftp_proxy',
            '$BROWSER',
            '$EDITOR',
            '$SHELL',
            '$PAGER'
        ])
    elif next_command_name == 'sudo':
        param = random.choice([
            'pm-hibernate',
            'shutdown -h',
            'vim /etc/httpd.conf',
            'vim /etc/resolve.conf',
            'service network restart',
            '/etc/init.d/network-manager restart',
        ])
    return next_command_name, param
def act(self, cmd_name, params=None):
    """Run the named command, passing params when given.

    :param cmd_name: name of the method on self to invoke.
    :param params: optional parameter string for the command.
    """
    command = getattr(self, cmd_name)
    if params:
        command(params)
    else:
        command()
def json_record(self, message, extra, record):
    """Prepare the JSON payload to be logged.

    Override this method to change the JSON log format.

    :param message: log message, e.g. `logger.info(msg='Sign up')`.
    :param extra: dict passed as the `extra` param of the logging call,
                  e.g. `logger.info('Sign up', extra={'code': '52d6ce'})`.
    :param record: `LogRecord` received from `JSONFormatter.format()`.
    :return: dict which will be passed to the JSON library.
    """
    extra['message'] = message
    if 'time' not in extra:
        extra['time'] = datetime.utcnow()
    if record.exc_info:
        extra['exc_info'] = self.formatException(record.exc_info)
    return extra
def mutate_json_record(self, json_record):
    """Convert fields of `json_record` to JSON-friendly types.

    The default implementation converts `datetime` values to ISO8601
    strings; override to customize.

    :param json_record: dict produced by `json_record()`.
    :return: the same dict, mutated in place.
    """
    for attr_name in json_record:
        attr = json_record[attr_name]
        if isinstance(attr, datetime):
            json_record[attr_name] = attr.isoformat()
    return json_record
def stop(self):
    """Stop the thread and close its channel, if one is open."""
    self._stop.set()
    channel = self._channel
    if channel is not None:
        channel.close()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.