signature: string lengths 8 to 3.44k
body: string lengths 0 to 1.41M
docstring: string lengths 1 to 122k
id: string lengths 5 to 17
def post_item(self, id, params):
return self.post_data("<STR_LIT>" + str(id), params)<EOL>
Change an item title/body/date
f2744:c0:m7
def upload_to_experiment(self, id, params):
return self.post_file("<STR_LIT>" + str(id), params)<EOL>
Upload a file to an experiment
f2744:c0:m8
def upload_to_item(self, id, params):
return self.post_file("<STR_LIT>" + str(id), params)<EOL>
Upload a file to an item
f2744:c0:m9
def add_tag_to_experiment(self, id, params):
return self.post_data("<STR_LIT>" + str(id), params)<EOL>
Add a tag to an experiment
f2744:c0:m10
def add_tag_to_item(self, id, params):
return self.post_data("<STR_LIT>" + str(id), params)<EOL>
Add a tag to an item
f2744:c0:m11
def __perform_request(self, url, type=GET, params=None):
if params is None:<EOL><INDENT>params = {}<EOL><DEDENT>if not self.token:<EOL><INDENT>raise SetupError("<STR_LIT>")<EOL><DEDENT>if not self.endpoint:<EOL><INDENT>raise SetupError("<STR_LIT>")<EOL><DEDENT>if self.dev:<EOL><INDENT>requests.packages.urllib3.disable_warnings()<EOL><DEDENT>url = urljoin(self.endpoint, url)<EOL>identity = lambda x: x<EOL>json_dumps = lambda x: json.dumps(x)<EOL>lookup = {<EOL>GET: (requests.get, {}, '<STR_LIT>', identity),<EOL>POST: (requests.post, {'<STR_LIT>': '<STR_LIT:application/json>'}, '<STR_LIT:data>',<EOL>json_dumps),<EOL>PUT: (requests.put, {'<STR_LIT>': '<STR_LIT:application/json>'}, '<STR_LIT:data>',<EOL>json_dumps),<EOL>DELETE: (requests.delete,<EOL>{'<STR_LIT>': '<STR_LIT:application/json>'},<EOL>'<STR_LIT:data>', json_dumps),<EOL>}<EOL>requests_method, headers, payload, transform = lookup[type]<EOL>headers.update({'<STR_LIT>': self.token})<EOL>kwargs = {'<STR_LIT>': headers, payload: transform(params)}<EOL>headers_str = str(headers).replace(self.token.strip(), '<STR_LIT>')<EOL>self._log.debug('<STR_LIT>' %<EOL>(type, url, payload, params, headers_str))<EOL>return requests_method(url, verify=False, **kwargs)<EOL>
This method performs the real request; this way we can customize only the "output" of the API call via the self.__call_api method. It returns the request object.
f2745:c5:m1
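The body above dispatches on the HTTP verb through a lookup table, but its string literals are anonymized. A minimal sketch of the same dispatch pattern, assuming hypothetical header and payload key names ('Authorization', 'params', 'data'):

.. code-block:: python

    import json
    from urllib.parse import urljoin

    import requests

    GET, POST, PUT, DELETE = 'GET', 'POST', 'PUT', 'DELETE'

    def perform_request(endpoint, token, url, type=GET, params=None):
        # Dispatch table: HTTP verb -> (callable, extra headers, payload kwarg, transform).
        # Header and payload key names here are assumptions, not the anonymized originals.
        params = params if params is not None else {}
        lookup = {
            GET: (requests.get, {}, 'params', lambda x: x),
            POST: (requests.post, {'Content-Type': 'application/json'}, 'data', json.dumps),
            PUT: (requests.put, {'Content-Type': 'application/json'}, 'data', json.dumps),
            DELETE: (requests.delete, {'Content-Type': 'application/json'}, 'data', json.dumps),
        }
        requests_method, headers, payload, transform = lookup[type]
        headers.update({'Authorization': token})
        kwargs = {'headers': headers, payload: transform(params)}
        return requests_method(urljoin(endpoint, url), **kwargs)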
def get_data(self, url, type=GET, params=None):
if params is None:<EOL><INDENT>params = dict()<EOL><DEDENT>req = self.__perform_request(url, type, params)<EOL>if req.status_code == <NUM_LIT>:<EOL><INDENT>return True<EOL><DEDENT>if req.status_code == <NUM_LIT>:<EOL><INDENT>raise NotFoundError()<EOL><DEDENT>try:<EOL><INDENT>data = req.json()<EOL><DEDENT>except ValueError as e:<EOL><INDENT>raise JSONReadError(<EOL>'<STR_LIT>' % str(e)<EOL>)<EOL><DEDENT>if not req.ok:<EOL><INDENT>msg = [data[m] for m in ("<STR_LIT:id>", "<STR_LIT:message>") if m in data][<NUM_LIT:1>]<EOL>raise DataReadError(msg)<EOL><DEDENT>return data<EOL>
This method is a basic implementation of __call_api that also checks for errors. In case of success the method returns True or the content of the response to the request.
f2745:c5:m2
def post_data(self, url, params):
url = urljoin(self.endpoint, url)<EOL>headers = {'<STR_LIT>': self.token}<EOL>req = requests.post(url, headers=headers, data=params, verify=False)<EOL>if req.status_code == <NUM_LIT>:<EOL><INDENT>return True<EOL><DEDENT>if req.status_code == <NUM_LIT>:<EOL><INDENT>raise NotFoundError()<EOL><DEDENT>try:<EOL><INDENT>data = req.json()<EOL><DEDENT>except ValueError as e:<EOL><INDENT>raise JSONReadError(<EOL>'<STR_LIT>' % str(e)<EOL>)<EOL><DEDENT>if not req.ok:<EOL><INDENT>msg = [data[m] for m in ("<STR_LIT:id>", "<STR_LIT:message>") if m in data][<NUM_LIT:1>]<EOL>raise DataReadError(msg)<EOL><DEDENT>return data<EOL>
POST data to change the title/date/body or to create an experiment
f2745:c5:m3
def post_file(self, url, params):
url = urljoin(self.endpoint, url)<EOL>headers = {'<STR_LIT>': self.token}<EOL>req = requests.post(url, headers=headers, files=params, verify=False)<EOL>if req.status_code == <NUM_LIT>:<EOL><INDENT>return True<EOL><DEDENT>if req.status_code == <NUM_LIT>:<EOL><INDENT>raise NotFoundError()<EOL><DEDENT>try:<EOL><INDENT>data = req.json()<EOL><DEDENT>except ValueError as e:<EOL><INDENT>raise JSONReadError(<EOL>'<STR_LIT>' % str(e)<EOL>)<EOL><DEDENT>if not req.ok:<EOL><INDENT>msg = [data[m] for m in ("<STR_LIT:id>", "<STR_LIT:message>") if m in data][<NUM_LIT:1>]<EOL>raise DataReadError(msg)<EOL><DEDENT>return data<EOL>
POST files
f2745:c5:m4
def get_version():
return '<STR_LIT:.>'.join(str(i) for i in VERSION[:<NUM_LIT:3>])<EOL>
Return only the digit parts of the version.
f2746:m1
def render_to_json_response(self, context, **response_kwargs):
return JsonResponse(<EOL>self.get_data(context),<EOL>**response_kwargs<EOL>)<EOL>
Returns a JSON response, transforming 'context' to make the payload.
f2748:c2:m0
def get_data(self, context):
<EOL>return context<EOL>
Returns an object that will be serialized as JSON by json.dumps().
f2748:c2:m1
def anonymous_required(function):
def wrapper(*args, **kwargs):<EOL><INDENT>if args[<NUM_LIT:0>].user.is_authenticated():<EOL><INDENT>url = settings.ANONYMOUS_REQUIRED_REDIRECT_URL<EOL>return HttpResponseRedirect(reverse(url))<EOL><DEDENT>return function(*args, **kwargs)<EOL><DEDENT>return wrapper<EOL>
Redirect to the user profile if the user is already logged in
f2750:m0
def startup_proc():
global NL_BASE<EOL>global NL_PROC<EOL>log.debug('<STR_LIT>')<EOL>NL_BASE = NapalmLogs(disable_security=True,<EOL>address=NAPALM_LOGS_TEST_ADDR,<EOL>port=NAPALM_LOGS_TEST_PORT,<EOL>publisher=[{'<STR_LIT>': {'<STR_LIT>': True}}],<EOL>listener=[{'<STR_LIT>': {}}],<EOL>publish_address=NAPALM_LOGS_TEST_PUB_ADDR,<EOL>publish_port=NAPALM_LOGS_TEST_PUB_PORT,<EOL>log_level=NAPALM_LOGS_TEST_LOG_LEVEL)<EOL>NL_PROC = Process(target=NL_BASE.start_engine)<EOL>NL_PROC.start()<EOL>
Startup the napalm-logs process.
f2754:m0
def startup_local_client():
time.sleep(<NUM_LIT:2>)<EOL>global TEST_CLIENT<EOL>context = zmq.Context()<EOL>TEST_CLIENT = context.socket(zmq.SUB)<EOL>TEST_CLIENT.connect('<STR_LIT>'.format(<EOL>addr=NAPALM_LOGS_TEST_PUB_ADDR,<EOL>port=NAPALM_LOGS_TEST_PUB_PORT)<EOL>)<EOL>TEST_CLIENT.setsockopt(zmq.SUBSCRIBE, b'<STR_LIT>')<EOL>
Startup a local ZMQ client to receive the published messages.
f2754:m1
def expr_match(line, expr):
if line == expr:<EOL><INDENT>return True<EOL><DEDENT>if fnmatch.fnmatch(line, expr):<EOL><INDENT>return True<EOL><DEDENT>try:<EOL><INDENT>if re.match(r'<STR_LIT>'.format(expr), line):<EOL><INDENT>return True<EOL><DEDENT><DEDENT>except re.error:<EOL><INDENT>pass<EOL><DEDENT>return False<EOL>
Evaluate a line of text against an expression. First try a full-string match, next try globbing, and then try to match assuming expr is a regular expression. Originally designed to match minion IDs for whitelists/blacklists.
f2761:m0
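A minimal, self-contained sketch of the matching order described above (exact match, then globbing, then regex); the anchoring of the regular expression is an assumption, since the original format string is anonymized:

.. code-block:: python

    import re
    import fnmatch

    def expr_match(line, expr):
        # Exact match first, then shell-style globbing, then a regex attempt.
        if line == expr:
            return True
        if fnmatch.fnmatch(line, expr):
            return True
        try:
            if re.match(r'\A{0}\Z'.format(expr), line):
                return True
        except re.error:
            pass  # expr is not a valid regular expression
        return False

    print(expr_match('junos', 'jun*'))   # True via globbing
    print(expr_match('eos', r'e.s'))     # True via regex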
def check_whitelist_blacklist(value, whitelist=None, blacklist=None):
if blacklist is not None:<EOL><INDENT>if not hasattr(blacklist, '<STR_LIT>'):<EOL><INDENT>blacklist = [blacklist]<EOL><DEDENT>try:<EOL><INDENT>for expr in blacklist:<EOL><INDENT>if expr_match(value, expr):<EOL><INDENT>return False<EOL><DEDENT><DEDENT><DEDENT>except TypeError:<EOL><INDENT>log.error('<STR_LIT>'.format(blacklist))<EOL><DEDENT><DEDENT>if whitelist:<EOL><INDENT>if not hasattr(whitelist, '<STR_LIT>'):<EOL><INDENT>whitelist = [whitelist]<EOL><DEDENT>try:<EOL><INDENT>for expr in whitelist:<EOL><INDENT>if expr_match(value, expr):<EOL><INDENT>return True<EOL><DEDENT><DEDENT><DEDENT>except TypeError:<EOL><INDENT>log.error('<STR_LIT>'.format(whitelist))<EOL><DEDENT><DEDENT>else:<EOL><INDENT>return True<EOL><DEDENT>return False<EOL>
Check a whitelist and/or blacklist to see if the value matches it. value The item to check the whitelist and/or blacklist against. whitelist The list of items that are white-listed. If ``value`` is found in the whitelist, then the function returns ``True``. Otherwise, it returns ``False``. blacklist The list of items that are black-listed. If ``value`` is found in the blacklist, then the function returns ``False``. Otherwise, it returns ``True``. If both a whitelist and a blacklist are provided, value membership in the blacklist will be examined first. If the value is not found in the blacklist, then the whitelist is checked. If the value isn't found in the whitelist, the function returns ``False``.
f2761:m1
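A simplified usage sketch of the blacklist-first semantics described above; the matcher is reduced to exact/glob matching and the iterable check uses isinstance rather than the original hasattr test:

.. code-block:: python

    import fnmatch

    def _match(value, expr):
        # Simplified matcher: exact match or shell-style glob
        # (the regex fallback of expr_match is omitted for brevity).
        return value == expr or fnmatch.fnmatch(value, expr)

    def check_whitelist_blacklist(value, whitelist=None, blacklist=None):
        # The blacklist is examined first: a blacklisted value is always rejected.
        if blacklist:
            if isinstance(blacklist, str):
                blacklist = [blacklist]
            if any(_match(value, expr) for expr in blacklist):
                return False
        if whitelist:
            if isinstance(whitelist, str):
                whitelist = [whitelist]
            return any(_match(value, expr) for expr in whitelist)
        return True

    print(check_whitelist_blacklist('junos', whitelist=['junos', 'eos']))   # True
    print(check_whitelist_blacklist('junos', blacklist=['jun*']))           # False
    print(check_whitelist_blacklist('iosxr', whitelist=['junos', 'eos']))   # False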
def _setup_ipc(self):
log.debug('<STR_LIT>')<EOL>self.ctx = zmq.Context()<EOL>self.sub = self.ctx.socket(zmq.SUB)<EOL>self.sub.bind(PUB_PX_IPC_URL)<EOL>self.sub.setsockopt(zmq.SUBSCRIBE, b'<STR_LIT>')<EOL>log.debug('<STR_LIT>', self.hwm)<EOL>try:<EOL><INDENT>self.sub.setsockopt(zmq.HWM, self.hwm)<EOL><DEDENT>except AttributeError:<EOL><INDENT>self.sub.setsockopt(zmq.SNDHWM, self.hwm)<EOL><DEDENT>self.pub = self.ctx.socket(zmq.PUB)<EOL>self.pub.bind(PUB_IPC_URL)<EOL>log.debug('<STR_LIT>', self.hwm)<EOL>try:<EOL><INDENT>self.pub.setsockopt(zmq.HWM, self.hwm)<EOL><DEDENT>except AttributeError:<EOL><INDENT>self.pub.setsockopt(zmq.SNDHWM, self.hwm)<EOL><DEDENT>
Setup the IPC PUB and SUB sockets for the proxy.
f2762:c0:m2
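A minimal sketch of the SUB/PUB proxy setup with the HWM fallback used above (older pyzmq exposes a single zmq.HWM option, newer versions split it into SNDHWM/RCVHWM, hence the AttributeError handling); the IPC URLs are placeholders:

.. code-block:: python

    import zmq

    def setup_proxy(hwm=1000):
        # Minimal sketch of the SUB -> PUB forwarding device; the IPC paths are placeholders.
        ctx = zmq.Context()
        sub = ctx.socket(zmq.SUB)
        sub.bind('ipc:///tmp/napalm-logs-px-sub.ipc')
        sub.setsockopt(zmq.SUBSCRIBE, b'')            # subscribe to everything
        pub = ctx.socket(zmq.PUB)
        pub.bind('ipc:///tmp/napalm-logs-px-pub.ipc')
        for skt in (sub, pub):
            try:
                skt.setsockopt(zmq.HWM, hwm)          # older pyzmq: single HWM option
            except AttributeError:
                skt.setsockopt(zmq.SNDHWM, hwm)       # newer pyzmq: split send/receive HWM
                skt.setsockopt(zmq.RCVHWM, hwm)
        return ctx, sub, pub

    # zmq.proxy(sub, pub) would then shovel every message from the SUB socket to the PUB socket.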
def start(self):
self._setup_ipc()<EOL>thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),))<EOL>thread.start()<EOL>signal.signal(signal.SIGTERM, self._exit_gracefully)<EOL>try:<EOL><INDENT>zmq.proxy(self.sub, self.pub)<EOL><DEDENT>except zmq.ZMQError as error:<EOL><INDENT>if self.__up is False:<EOL><INDENT>log.info('<STR_LIT>')<EOL>return<EOL><DEDENT>else:<EOL><INDENT>log.error(error, exc_info=True)<EOL>raise NapalmLogsExit(error)<EOL><DEDENT><DEDENT>
Listen to messages and publish them.
f2762:c0:m3
def get_interface(name):
try:<EOL><INDENT>log.debug('<STR_LIT>', name)<EOL>return BUFFER_LOOKUP[name]<EOL><DEDENT>except KeyError:<EOL><INDENT>msg = '<STR_LIT>'.format(name)<EOL>log.error(msg, exc_info=True)<EOL>raise InvalidBufferException(msg)<EOL><DEDENT>
Return the buffer interface class.
f2764:m0
def _setup_ipc(self):
self.ctx = zmq.Context()<EOL>log.debug('<STR_LIT>', self._transport_type, self.pub_id)<EOL>self.sub = self.ctx.socket(zmq.SUB)<EOL>self.sub.connect(PUB_IPC_URL)<EOL>self.sub.setsockopt(zmq.SUBSCRIBE, b'<STR_LIT>')<EOL>try:<EOL><INDENT>self.sub.setsockopt(zmq.HWM, self.opts['<STR_LIT>'])<EOL><DEDENT>except AttributeError:<EOL><INDENT>self.sub.setsockopt(zmq.RCVHWM, self.opts['<STR_LIT>'])<EOL><DEDENT>
Subscribe to the pub IPC and publish the messages on the right transport.
f2766:c0:m2
def _setup_transport(self):
if '<STR_LIT>' in self.error_whitelist:<EOL><INDENT>log.info('<STR_LIT>', self._transport_type, self.pub_id)<EOL><DEDENT>if '<STR_LIT>' in self.error_whitelist:<EOL><INDENT>log.info('<STR_LIT>', self._transport_type, self.pub_id)<EOL><DEDENT>transport_class = get_transport(self._transport_type)<EOL>log.debug('<STR_LIT>',<EOL>self._transport_type,<EOL>self.serializer)<EOL>self.serializer_fun = get_serializer(self.serializer)<EOL>self.transport = transport_class(self.address,<EOL>self.port,<EOL>**self.publisher_opts)<EOL>self.__transport_encrypt = True<EOL>if hasattr(self.transport, '<STR_LIT>') and getattr(self.transport, '<STR_LIT>') is True:<EOL><INDENT>self.__transport_encrypt = False<EOL><DEDENT>
Setup the transport.
f2766:c0:m3
def _prepare(self, serialized_obj):
<EOL>nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)<EOL>encrypted = self.__safe.encrypt(serialized_obj, nonce)<EOL>signed = self.__signing_key.sign(encrypted)<EOL>return signed<EOL>
Prepare the object to be sent over the untrusted channel.
f2766:c0:m4
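A round-trip sketch of the encrypt-then-sign preparation above together with the client-side verify-and-decrypt step, assuming PyNaCl and u-msgpack-python; the payload contents and variable names are illustrative:

.. code-block:: python

    import nacl.secret
    import nacl.signing
    import nacl.utils
    import umsgpack

    # Sender side: encrypt the serialized object, then sign the ciphertext.
    priv_key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)
    signing_key = nacl.signing.SigningKey.generate()
    safe = nacl.secret.SecretBox(priv_key)

    serialized_obj = umsgpack.packb({'host': 'rtr1', 'error': 'BGP_NEIGHBOR_DOWN'})
    nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)
    encrypted = safe.encrypt(serialized_obj, nonce)
    signed = signing_key.sign(encrypted)

    # Receiver side: verify the signature, then decrypt and unpack.
    verify_key = signing_key.verify_key
    verified = verify_key.verify(signed)
    obj = umsgpack.unpackb(nacl.secret.SecretBox(priv_key).decrypt(verified))
    print(obj)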
def start(self):
<EOL>napalm_logs_publisher_received_messages = Counter(<EOL>'<STR_LIT>',<EOL>"<STR_LIT>",<EOL>['<STR_LIT>', '<STR_LIT:address>', '<STR_LIT:port>']<EOL>)<EOL>napalm_logs_publisher_whitelist_blacklist_check_fail = Counter(<EOL>'<STR_LIT>',<EOL>"<STR_LIT>",<EOL>['<STR_LIT>', '<STR_LIT:address>', '<STR_LIT:port>']<EOL>)<EOL>napalm_logs_publisher_messages_published = Counter(<EOL>'<STR_LIT>',<EOL>"<STR_LIT>",<EOL>['<STR_LIT>', '<STR_LIT:address>', '<STR_LIT:port>']<EOL>)<EOL>self._setup_ipc()<EOL>thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),))<EOL>thread.start()<EOL>signal.signal(signal.SIGTERM, self._exit_gracefully)<EOL>self.transport.start()<EOL>self.__up = True<EOL>while self.__up:<EOL><INDENT>try:<EOL><INDENT>bin_obj = self.sub.recv()<EOL><DEDENT>except zmq.ZMQError as error:<EOL><INDENT>if self.__up is False:<EOL><INDENT>log.info('<STR_LIT>')<EOL>return<EOL><DEDENT>else:<EOL><INDENT>log.error(error, exc_info=True)<EOL>raise NapalmLogsExit(error)<EOL><DEDENT><DEDENT>obj = umsgpack.unpackb(bin_obj)<EOL>if self._strip_message_details:<EOL><INDENT>obj.pop('<STR_LIT>', None)<EOL>bin_obj = self.serializer_fun(obj)<EOL><DEDENT>napalm_logs_publisher_received_messages.labels(<EOL>publisher_type=self._transport_type,<EOL>address=self.address,<EOL>port=self.port<EOL>).inc()<EOL>if not napalm_logs.ext.check_whitelist_blacklist(obj['<STR_LIT:error>'],<EOL>whitelist=self.error_whitelist,<EOL>blacklist=self.error_blacklist):<EOL><INDENT>log.debug('<STR_LIT>',<EOL>obj['<STR_LIT:error>'],<EOL>self._transport_type,<EOL>self.pub_id)<EOL>napalm_logs_publisher_whitelist_blacklist_check_fail.labels(<EOL>publisher_type=self._transport_type,<EOL>address=self.address,<EOL>port=self.port<EOL>).inc()<EOL>continue<EOL><DEDENT>serialized_obj = self._serialize(obj, bin_obj)<EOL>log.debug('<STR_LIT>')<EOL>if not self.disable_security and self.__transport_encrypt:<EOL><INDENT>serialized_obj = self._prepare(serialized_obj)<EOL><DEDENT>self.transport.publish(serialized_obj)<EOL>napalm_logs_publisher_messages_published.labels(<EOL>publisher_type=self._transport_type,<EOL>address=self.address,<EOL>port=self.port<EOL>).inc()<EOL><DEDENT>
Listen to messages and publish them.
f2766:c0:m6
def __init__(self,<EOL>address='<STR_LIT>',<EOL>port=<NUM_LIT>,<EOL>listener='<STR_LIT>',<EOL>publisher='<STR_LIT>',<EOL>publish_address='<STR_LIT>',<EOL>publish_port=<NUM_LIT>,<EOL>auth_address='<STR_LIT>',<EOL>auth_port=<NUM_LIT>,<EOL>metrics_enabled=False,<EOL>metrics_address='<STR_LIT>',<EOL>metrics_port='<STR_LIT>',<EOL>metrics_dir='<STR_LIT>',<EOL>certificate=None,<EOL>keyfile=None,<EOL>disable_security=False,<EOL>config_path=None,<EOL>config_dict=None,<EOL>extension_config_path=None,<EOL>extension_config_dict=None,<EOL>log_level='<STR_LIT>',<EOL>log_format='<STR_LIT>',<EOL>device_blacklist=[],<EOL>device_whitelist=[],<EOL>hwm=None,<EOL>device_worker_processes=<NUM_LIT:1>,<EOL>serializer='<STR_LIT>',<EOL>buffer=None):
self.address = address<EOL>self.port = port<EOL>self.listener = listener<EOL>self.publisher = publisher<EOL>self.publish_address = publish_address<EOL>self.publish_port = publish_port<EOL>self.auth_address = auth_address<EOL>self.auth_port = auth_port<EOL>self.metrics_enabled = metrics_enabled<EOL>self.metrics_address = metrics_address<EOL>self.metrics_port = metrics_port<EOL>self.metrics_dir = metrics_dir<EOL>self.certificate = certificate<EOL>self.keyfile = keyfile<EOL>self.disable_security = disable_security<EOL>self.config_path = config_path<EOL>self.config_dict = config_dict<EOL>self.extension_config_path = extension_config_path<EOL>self.extension_config_dict = extension_config_dict<EOL>self.log_level = log_level<EOL>self.log_format = log_format<EOL>self.device_whitelist = device_whitelist<EOL>self.device_blacklist = device_blacklist<EOL>self.serializer = serializer<EOL>self.device_worker_processes = device_worker_processes<EOL>self.hwm = hwm<EOL>self._buffer_cfg = buffer<EOL>self._buffer = None<EOL>self.opts = {}<EOL>self._setup_log()<EOL>self._build_config()<EOL>self._verify_config()<EOL>self._post_preparation()<EOL>self._setup_metrics()<EOL>self._setup_buffer()<EOL>self.__priv_key = None<EOL>self.__signing_key = None<EOL>self._processes = []<EOL>self.up = True<EOL>
Init the napalm-logs engine. :param address: The address to bind the syslog client. Default: 0.0.0.0. :param port: Listen port. Default: 514. :param listener: Listen type. Default: udp. :param publish_address: The address to bind when publishing the OC objects. Default: 0.0.0.0. :param publish_port: Publish port. Default: 49017.
f2767:c0:m0
def _setup_buffer(self):
if not self._buffer_cfg or not isinstance(self._buffer_cfg, dict):<EOL><INDENT>return<EOL><DEDENT>buffer_name = list(self._buffer_cfg.keys())[<NUM_LIT:0>]<EOL>buffer_class = napalm_logs.buffer.get_interface(buffer_name)<EOL>log.debug('<STR_LIT>', buffer_name)<EOL>if '<STR_LIT>' not in self._buffer_cfg[buffer_name]:<EOL><INDENT>self._buffer_cfg[buffer_name]['<STR_LIT>'] = CONFIG.BUFFER_EXPIRE_TIME<EOL><DEDENT>self._buffer = buffer_class(**self._buffer_cfg[buffer_name])<EOL>
Setup the buffer subsystem.
f2767:c0:m3
def _setup_metrics(self):
path = os.environ.get("<STR_LIT>")<EOL>if not os.path.exists(self.metrics_dir):<EOL><INDENT>try:<EOL><INDENT>log.info("<STR_LIT>")<EOL>os.makedirs(self.metrics_dir)<EOL><DEDENT>except OSError:<EOL><INDENT>log.error("<STR_LIT>")<EOL>raise ConfigurationException("<STR_LIT>")<EOL><DEDENT>path = self.metrics_dir<EOL><DEDENT>elif path != self.metrics_dir:<EOL><INDENT>path = self.metrics_dir<EOL><DEDENT>os.environ['<STR_LIT>'] = path<EOL>log.info("<STR_LIT>")<EOL>log.debug("<STR_LIT>".format(path))<EOL>files = os.listdir(path)<EOL>for f in files:<EOL><INDENT>if f.endswith("<STR_LIT>"):<EOL><INDENT>os.remove(os.path.join(path, f))<EOL><DEDENT>log.debug("<STR_LIT>")<EOL><DEDENT>if self.metrics_enabled:<EOL><INDENT>registry = CollectorRegistry()<EOL>multiprocess.MultiProcessCollector(registry)<EOL>start_http_server(<EOL>port=self.metrics_port,<EOL>addr=self.metrics_address,<EOL>registry=registry<EOL>)<EOL><DEDENT>
Start metric exposition
f2767:c0:m4
def _setup_log(self):
logging_level = CONFIG.LOGGING_LEVEL.get(self.log_level.lower())<EOL>logging.basicConfig(format=self.log_format,<EOL>level=logging_level)<EOL>
Setup the log object.
f2767:c0:m5
def _post_preparation(self):
self.opts['<STR_LIT>'] = CONFIG.ZMQ_INTERNAL_HWM if self.hwm is None else self.hwm<EOL>self.opts['<STR_LIT>'] = False<EOL>for pub in self.publisher:<EOL><INDENT>pub_name = list(pub.keys())[<NUM_LIT:0>]<EOL>pub_opts = list(pub.values())[<NUM_LIT:0>]<EOL>error_whitelist = pub_opts.get('<STR_LIT>', [])<EOL>error_blacklist = pub_opts.get('<STR_LIT>', [])<EOL>if '<STR_LIT>' not in error_blacklist:<EOL><INDENT>error_blacklist.append('<STR_LIT>')<EOL><DEDENT>if '<STR_LIT>' not in error_blacklist:<EOL><INDENT>error_blacklist.append('<STR_LIT>')<EOL><DEDENT>if '<STR_LIT>' in pub_opts and pub[pub_name]['<STR_LIT>']:<EOL><INDENT>pub[pub_name]['<STR_LIT>'] = True<EOL>error_whitelist = ['<STR_LIT>']<EOL>error_blacklist = []<EOL><DEDENT>if '<STR_LIT>' in pub_opts and pub[pub_name]['<STR_LIT>']:<EOL><INDENT>pub[pub_name]['<STR_LIT>'] = True<EOL>error_whitelist = ['<STR_LIT>']<EOL>error_blacklist = []<EOL><DEDENT>if '<STR_LIT>' in pub_opts and '<STR_LIT>' in error_blacklist:<EOL><INDENT>error_blacklist.remove('<STR_LIT>')<EOL><DEDENT>if '<STR_LIT>' in pub_opts and '<STR_LIT>' in error_blacklist:<EOL><INDENT>error_blacklist.remove('<STR_LIT>')<EOL><DEDENT>self.opts['<STR_LIT>'] |= '<STR_LIT>' in error_whitelist or '<STR_LIT>' not in error_blacklist<EOL>pub[pub_name]['<STR_LIT>'] = error_whitelist<EOL>pub[pub_name]['<STR_LIT>'] = error_blacklist<EOL><DEDENT>
The steps for post-preparation (once the logging and everything else are already set up).
f2767:c0:m6
def _whitelist_blacklist(self, os_name):
return napalm_logs.ext.check_whitelist_blacklist(os_name,<EOL>whitelist=self.device_whitelist,<EOL>blacklist=self.device_blacklist)<EOL>
Determines if the OS should be ignored, depending on the whitelist-blacklist logic configured by the user.
f2767:c0:m7
@staticmethod<EOL><INDENT>def _extract_yaml_docstring(stream):<DEDENT>
comment_lines = []<EOL>lines = stream.read().splitlines()<EOL>for line in lines:<EOL><INDENT>line_strip = line.strip()<EOL>if not line_strip:<EOL><INDENT>continue<EOL><DEDENT>if line_strip.startswith('<STR_LIT:#>'):<EOL><INDENT>comment_lines.append(<EOL>line_strip.replace('<STR_LIT:#>', '<STR_LIT>', <NUM_LIT:1>).strip()<EOL>)<EOL><DEDENT>else:<EOL><INDENT>break<EOL><DEDENT><DEDENT>return '<STR_LIT:U+0020>'.join(comment_lines)<EOL>
Extract the comments at the top of the YAML file, from the stream handler. Return the extracted comment as string.
f2767:c0:m8
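A standalone sketch of extracting the leading '#' comment block from a YAML stream, with a hypothetical profile file as input:

.. code-block:: python

    import io

    def extract_yaml_docstring(stream):
        # Collect the leading comment lines ('#' prefixed) at the top of a YAML file.
        comment_lines = []
        for line in stream.read().splitlines():
            line_strip = line.strip()
            if not line_strip:
                continue
            if line_strip.startswith('#'):
                comment_lines.append(line_strip.replace('#', '', 1).strip())
            else:
                break
        return ' '.join(comment_lines)

    yaml_text = """# Match messages reporting an interface going down
    # and map them to the openconfig-interfaces model.
    messages:
      - error: INTERFACE_DOWN
    """
    print(extract_yaml_docstring(io.StringIO(yaml_text)))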
def _load_config(self, path):
config = {}<EOL>log.debug('<STR_LIT>', path)<EOL>if not os.path.isdir(path):<EOL><INDENT>msg = (<EOL>'<STR_LIT>'<EOL>'<STR_LIT>'<EOL>).format(path=path)<EOL>log.error(msg)<EOL>raise IOError(msg)<EOL><DEDENT>os_subdirs = [sdpath[<NUM_LIT:0>] for sdpath in os.walk(path)][<NUM_LIT:1>:]<EOL>if not os_subdirs:<EOL><INDENT>log.error('<STR_LIT>', path)<EOL><DEDENT>for os_dir in os_subdirs:<EOL><INDENT>os_name = os.path.split(os_dir)[<NUM_LIT:1>] <EOL>if os_name.startswith('<STR_LIT>'):<EOL><INDENT>log.debug('<STR_LIT>', os_name)<EOL>continue<EOL><DEDENT>if not self._whitelist_blacklist(os_name):<EOL><INDENT>log.debug('<STR_LIT>', os_name)<EOL>continue<EOL><DEDENT>log.debug('<STR_LIT>', os_name)<EOL>log.debug('<STR_LIT:=>'*<NUM_LIT>)<EOL>if os_name not in config:<EOL><INDENT>config[os_name] = {}<EOL><DEDENT>files = os.listdir(os_dir)<EOL>for file_ in files:<EOL><INDENT>log.debug('<STR_LIT>', file_)<EOL>file_name, file_extension = os.path.splitext(file_)<EOL>file_extension = file_extension.replace('<STR_LIT:.>', '<STR_LIT>')<EOL>filepath = os.path.join(os_dir, file_)<EOL>comment = '<STR_LIT>'<EOL>if file_extension in ('<STR_LIT>', '<STR_LIT>'):<EOL><INDENT>try:<EOL><INDENT>log.debug('<STR_LIT>', file_)<EOL>with open(filepath, '<STR_LIT:r>') as fstream:<EOL><INDENT>cfg = yaml.load(fstream)<EOL>if file_name not in CONFIG.OS_INIT_FILENAMES:<EOL><INDENT>fstream.seek(<NUM_LIT:0>)<EOL>comment = self._extract_yaml_docstring(fstream)<EOL>if '<STR_LIT>' in cfg:<EOL><INDENT>for message in cfg['<STR_LIT>']:<EOL><INDENT>message['<STR_LIT>'] = comment<EOL><DEDENT><DEDENT><DEDENT>napalm_logs.utils.dictupdate(config[os_name], cfg)<EOL><DEDENT><DEDENT>except yaml.YAMLError as yamlexc:<EOL><INDENT>log.error('<STR_LIT>', filepath, exc_info=True)<EOL>if file_name in CONFIG.OS_INIT_FILENAMES:<EOL><INDENT>raise IOError(yamlexc)<EOL><DEDENT><DEDENT><DEDENT>elif file_extension == '<STR_LIT>':<EOL><INDENT>log.debug('<STR_LIT>', file_)<EOL>mod_fp, mod_file, mod_data = imp.find_module(file_name, [os_dir])<EOL>mod = imp.load_module(file_name, mod_fp, mod_file, mod_data)<EOL>if file_name in CONFIG.OS_INIT_FILENAMES:<EOL><INDENT>log.debug('<STR_LIT>', filepath)<EOL>if hasattr(mod, CONFIG.INIT_RUN_FUN) andhasattr(getattr(mod, CONFIG.INIT_RUN_FUN), '<STR_LIT>'):<EOL><INDENT>if '<STR_LIT>' not in config[os_name]:<EOL><INDENT>config[os_name]['<STR_LIT>'] = []<EOL><DEDENT>config[os_name]['<STR_LIT>'].append({<EOL>'<STR_LIT>': {'<STR_LIT>': '<STR_LIT>'},<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': getattr(mod, CONFIG.INIT_RUN_FUN),<EOL>'<STR_LIT>': filepath <EOL>})<EOL>log.info('<STR_LIT>',<EOL>filepath, os_name)<EOL><DEDENT>elif file_name != '<STR_LIT>':<EOL><INDENT>log.warning('<STR_LIT>',<EOL>filepath, CONFIG.INIT_RUN_FUN)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>if hasattr(mod, '<STR_LIT>'):<EOL><INDENT>mod_tag = getattr(mod, '<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>log.info('<STR_LIT>', filepath, file_name)<EOL>mod_tag = file_name<EOL><DEDENT>if hasattr(mod, '<STR_LIT>'):<EOL><INDENT>mod_err = getattr(mod, '<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>log.info('<STR_LIT>', filepath, file_name)<EOL>mod_err = file_name<EOL><DEDENT>if hasattr(mod, '<STR_LIT>'):<EOL><INDENT>err_match = getattr(mod, '<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>err_match = '<STR_LIT>'<EOL><DEDENT>model = CONFIG.OPEN_CONFIG_NO_MODEL<EOL>if hasattr(mod, '<STR_LIT>'):<EOL><INDENT>model = getattr(mod, '<STR_LIT>')<EOL><DEDENT>log.debug('<STR_LIT>', err_match)<EOL>if hasattr(mod, CONFIG.CONFIG_RUN_FUN) andhasattr(getattr(mod, CONFIG.CONFIG_RUN_FUN), 
'<STR_LIT>'):<EOL><INDENT>log.debug('<STR_LIT>',<EOL>file_, mod_tag, mod_err, err_match)<EOL>if '<STR_LIT>' not in config[os_name]:<EOL><INDENT>config[os_name]['<STR_LIT>'] = []<EOL><DEDENT>config[os_name]['<STR_LIT>'].append({<EOL>'<STR_LIT>': mod_tag,<EOL>'<STR_LIT:error>': mod_err,<EOL>'<STR_LIT>': err_match,<EOL>'<STR_LIT>': mod.__doc__,<EOL>'<STR_LIT>': getattr(mod, CONFIG.CONFIG_RUN_FUN),<EOL>'<STR_LIT>': filepath, <EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': model,<EOL>'<STR_LIT>': {},<EOL>'<STR_LIT>': {'<STR_LIT>': {}, '<STR_LIT>': {}}<EOL>})<EOL><DEDENT>else:<EOL><INDENT>log.warning('<STR_LIT>',<EOL>filepath, CONFIG.CONFIG_RUN_FUN)<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>log.info('<STR_LIT>', filepath)<EOL><DEDENT><DEDENT>log.debug('<STR_LIT:->'*<NUM_LIT>)<EOL><DEDENT>if not config:<EOL><INDENT>msg = '<STR_LIT>'.format(path=path)<EOL>log.error(msg)<EOL>raise IOError(msg)<EOL><DEDENT>log.debug('<STR_LIT>')<EOL>log.debug(config)<EOL>log.debug('<STR_LIT>', sys.getsizeof(config))<EOL>return config<EOL>
Read the configuration under a specific path and return the object.
f2767:c0:m9
def _verify_config_dict(self, valid, config, dev_os, key_path=None):
if not key_path:<EOL><INDENT>key_path = []<EOL><DEDENT>for key, value in valid.items():<EOL><INDENT>self._verify_config_key(key, value, valid, config, dev_os, key_path)<EOL><DEDENT>
Verify if the config dict is valid.
f2767:c0:m13
def _verify_config(self):
if not self.config_dict:<EOL><INDENT>self._raise_config_exception('<STR_LIT>')<EOL><DEDENT>for dev_os, dev_config in self.config_dict.items():<EOL><INDENT>if not dev_config:<EOL><INDENT>log.warning('<STR_LIT>', dev_os)<EOL>continue<EOL><DEDENT>self._verify_config_dict(CONFIG.VALID_CONFIG, dev_config, dev_os)<EOL><DEDENT>log.debug('<STR_LIT>')<EOL>
Verify that the config is correct
f2767:c0:m14
def _build_config(self):
if not self.config_dict:<EOL><INDENT>if not self.config_path:<EOL><INDENT>self.config_path = os.path.join(<EOL>os.path.dirname(os.path.realpath(__file__)),<EOL>'<STR_LIT>'<EOL>)<EOL><DEDENT>log.info('<STR_LIT>', self.config_path)<EOL>self.config_dict = self._load_config(self.config_path)<EOL><DEDENT>if not self.extension_config_dict and self.extension_config_path and os.path.normpath(self.extension_config_path) != os.path.normpath(self.config_path): <EOL><INDENT>log.info('<STR_LIT>', self.extension_config_path)<EOL>self.extension_config_dict = self._load_config(self.extension_config_path)<EOL><DEDENT>if self.extension_config_dict:<EOL><INDENT>napalm_logs.utils.dictupdate(self.config_dict, self.extension_config_dict)<EOL><DEDENT>
Build the config of the napalm syslog parser.
f2767:c0:m15
def _start_auth_proc(self):
log.debug('<STR_LIT>')<EOL>verify_key = self.__signing_key.verify_key<EOL>sgn_verify_hex = verify_key.encode(encoder=nacl.encoding.HexEncoder)<EOL>log.debug('<STR_LIT>')<EOL>auth = NapalmLogsAuthProc(self.certificate,<EOL>self.keyfile,<EOL>self.__priv_key,<EOL>sgn_verify_hex,<EOL>self.auth_address,<EOL>self.auth_port)<EOL>proc = Process(target=auth.start)<EOL>proc.start()<EOL>proc.description = '<STR_LIT>'<EOL>log.debug('<STR_LIT>', proc._name, proc.pid)<EOL>return proc<EOL>
Start the authenticator process.
f2767:c0:m16
def _start_lst_proc(self,<EOL>listener_type,<EOL>listener_opts):
log.debug('<STR_LIT>', listener_type)<EOL>listener = NapalmLogsListenerProc(self.opts,<EOL>self.address,<EOL>self.port,<EOL>listener_type,<EOL>listener_opts=listener_opts)<EOL>proc = Process(target=listener.start)<EOL>proc.start()<EOL>proc.description = '<STR_LIT>'<EOL>log.debug('<STR_LIT>', proc._name, proc.pid)<EOL>return proc<EOL>
Start the listener process.
f2767:c0:m17
def _start_srv_proc(self,<EOL>started_os_proc):
log.debug('<STR_LIT>')<EOL>server = NapalmLogsServerProc(self.opts,<EOL>self.config_dict,<EOL>started_os_proc,<EOL>buffer=self._buffer)<EOL>proc = Process(target=server.start)<EOL>proc.start()<EOL>proc.description = '<STR_LIT>'<EOL>log.debug('<STR_LIT>', proc._name, proc.pid)<EOL>return proc<EOL>
Start the server process.
f2767:c0:m18
def _start_pub_proc(self,<EOL>publisher_type,<EOL>publisher_opts,<EOL>pub_id):
log.debug('<STR_LIT>', publisher_type)<EOL>publisher = NapalmLogsPublisherProc(self.opts,<EOL>self.publish_address,<EOL>self.publish_port,<EOL>publisher_type,<EOL>self.serializer,<EOL>self.__priv_key,<EOL>self.__signing_key,<EOL>publisher_opts,<EOL>disable_security=self.disable_security,<EOL>pub_id=pub_id)<EOL>proc = Process(target=publisher.start)<EOL>proc.start()<EOL>proc.description = '<STR_LIT>'<EOL>log.debug('<STR_LIT>', proc._name, proc.pid)<EOL>return proc<EOL>
Start the publisher process.
f2767:c0:m20
def _start_dev_proc(self,<EOL>device_os,<EOL>device_config):
log.info('<STR_LIT>', device_os)<EOL>dos = NapalmLogsDeviceProc(device_os,<EOL>self.opts,<EOL>device_config)<EOL>os_proc = Process(target=dos.start)<EOL>os_proc.start()<EOL>os_proc.description = '<STR_LIT>' % device_os<EOL>log.debug('<STR_LIT>', os_proc._name, device_os, os_proc.pid)<EOL>return os_proc<EOL>
Start the device worker process.
f2767:c0:m21
def start_engine(self):
if self.disable_security is True:<EOL><INDENT>log.warning('<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>log.debug('<STR_LIT>')<EOL>self.__priv_key = nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE)<EOL>log.debug('<STR_LIT>')<EOL>self.__signing_key = nacl.signing.SigningKey.generate()<EOL>self._processes.append(self._start_auth_proc())<EOL><DEDENT>log.debug('<STR_LIT>')<EOL>proc = self._start_pub_px_proc()<EOL>self._processes.append(proc)<EOL>pub_id = <NUM_LIT:0><EOL>for pub in self.publisher:<EOL><INDENT>publisher_type, publisher_opts = list(pub.items())[<NUM_LIT:0>]<EOL>proc = self._start_pub_proc(publisher_type,<EOL>publisher_opts,<EOL>pub_id)<EOL>self._processes.append(proc)<EOL>pub_id += <NUM_LIT:1><EOL><DEDENT>log.info('<STR_LIT>')<EOL>started_os_proc = []<EOL>for device_os, device_config in self.config_dict.items():<EOL><INDENT>if not self._whitelist_blacklist(device_os):<EOL><INDENT>log.debug('<STR_LIT>', device_os)<EOL>continue<EOL><DEDENT>log.debug('<STR_LIT>', self.device_worker_processes, device_os)<EOL>for proc_index in range(self.device_worker_processes):<EOL><INDENT>self._processes.append(self._start_dev_proc(device_os,<EOL>device_config))<EOL><DEDENT>started_os_proc.append(device_os)<EOL><DEDENT>self._processes.append(self._start_srv_proc(started_os_proc))<EOL>for lst in self.listener:<EOL><INDENT>listener_type, listener_opts = list(lst.items())[<NUM_LIT:0>]<EOL>proc = self._start_lst_proc(listener_type,<EOL>listener_opts)<EOL>self._processes.append(proc)<EOL><DEDENT>thread = threading.Thread(target=self._check_children)<EOL>thread.start()<EOL>
Start the child processes (one per device OS)
f2767:c0:m22
def _check_children(self):
while self.up:<EOL><INDENT>time.sleep(<NUM_LIT:1>)<EOL>for process in self._processes:<EOL><INDENT>if process.is_alive() is True:<EOL><INDENT>continue<EOL><DEDENT>log.debug('<STR_LIT>', process.description)<EOL>self.stop_engine()<EOL><DEDENT><DEDENT>
Check that all of the child processes are still running
f2767:c0:m23
def emit(msg_dict):
log.debug('<STR_LIT>')<EOL>log.debug(msg_dict)<EOL>ret = {}<EOL>extracted = napalm_logs.utils.extract(_RGX, msg_dict['<STR_LIT:message>'], _RGX_PARTS)<EOL>if not extracted:<EOL><INDENT>return ret<EOL><DEDENT>uid_key_path = '<STR_LIT>'.format(extracted)<EOL>uid_value = int(extracted['<STR_LIT>'])<EOL>log.debug('<STR_LIT>', uid_value, uid_key_path)<EOL>ret.update(napalm_logs.utils.setval(uid_key_path, uid_value, dict_=ret))<EOL>login_key_path = '<STR_LIT>'.format(extracted)<EOL>ret.update(napalm_logs.utils.setval(login_key_path, True, dict_=ret))<EOL>return ret<EOL>
Extracts the details from the syslog message and returns an object having the following structure: .. code-block:: python { u'users': { u'user': { u'luke': { u'action': { u'login': True }, u'uid': 0 } } } }
f2768:m0
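A sketch of the extraction flow described in the docstring; the syslog message format and the regular expression are hypothetical stand-ins for the anonymized _RGX above, but the returned structure matches the documented one:

.. code-block:: python

    import re

    # Hypothetical syslog payload and regex; the real pattern is anonymized above.
    _RGX = re.compile(r"User '(?P<user>\w+)' login, class '(?P<login_class>[\w-]+)' \[(?P<uid>\d+)\]")

    def emit(msg_dict):
        ret = {}
        match = _RGX.search(msg_dict['message'])
        if not match:
            return ret
        extracted = match.groupdict()
        # Build the nested OpenConfig-style structure shown in the docstring.
        ret = {
            'users': {
                'user': {
                    extracted['user']: {
                        'action': {'login': True},
                        'uid': int(extracted['uid']),
                    }
                }
            }
        }
        return ret

    print(emit({'message': "User 'luke' login, class 'super-user' [0]"}))
    # {'users': {'user': {'luke': {'action': {'login': True}, 'uid': 0}}}}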
def _exit_gracefully(self, signum, _):
self.stop()<EOL>
Exit gracefully.
f2771:c0:m1
def _handshake(self, conn, addr):
<EOL>msg = conn.recv(len(MAGIC_REQ))<EOL>log.debug('<STR_LIT>', msg, addr)<EOL>if msg != MAGIC_REQ:<EOL><INDENT>log.warning('<STR_LIT>', msg, addr)<EOL>return<EOL><DEDENT>log.debug('<STR_LIT>')<EOL>conn.send(self.__key)<EOL>log.debug('<STR_LIT>')<EOL>msg = conn.recv(len(MAGIC_ACK))<EOL>if msg != MAGIC_ACK:<EOL><INDENT>return<EOL><DEDENT>log.debug('<STR_LIT>')<EOL>conn.send(self.__sgn)<EOL>log.debug('<STR_LIT>')<EOL>msg = conn.recv(len(MAGIC_ACK))<EOL>if msg != MAGIC_ACK:<EOL><INDENT>return<EOL><DEDENT>log.info('<STR_LIT>', addr)<EOL>self.keep_alive(conn)<EOL>
Ensures that the client receives the AES key.
f2771:c0:m2
def keep_alive(self, conn):
while self.__up:<EOL><INDENT>msg = conn.recv(len(AUTH_KEEP_ALIVE))<EOL>if msg != AUTH_KEEP_ALIVE:<EOL><INDENT>log.error('<STR_LIT>', AUTH_KEEP_ALIVE)<EOL>conn.close()<EOL>return<EOL><DEDENT>try:<EOL><INDENT>conn.send(AUTH_KEEP_ALIVE_ACK)<EOL><DEDENT>except (IOError, socket.error) as err:<EOL><INDENT>log.error('<STR_LIT>', err)<EOL>conn.close()<EOL>return<EOL><DEDENT><DEDENT>
Maintains auth sessions
f2771:c0:m3
def verify_cert(self):
log.debug('<STR_LIT>',<EOL>self.certificate, self.keyfile)<EOL>try:<EOL><INDENT>ssl.create_default_context().load_cert_chain(self.certificate, keyfile=self.keyfile)<EOL><DEDENT>except ssl.SSLError:<EOL><INDENT>error_string = '<STR_LIT>'<EOL>log.error(error_string)<EOL>raise SSLMismatchException(error_string)<EOL><DEDENT>except IOError:<EOL><INDENT>log.error('<STR_LIT>')<EOL>raise<EOL><DEDENT>log.debug('<STR_LIT>')<EOL>
Checks that the provided cert and key are valid and usable
f2771:c0:m4
def _create_skt(self):
log.debug('<STR_LIT>')<EOL>if '<STR_LIT::>' in self.auth_address:<EOL><INDENT>self.socket = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)<EOL><DEDENT>else:<EOL><INDENT>self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)<EOL><DEDENT>try:<EOL><INDENT>self.socket.bind((self.auth_address, self.auth_port))<EOL><DEDENT>except socket.error as msg:<EOL><INDENT>error_string = '<STR_LIT>'.format(self.auth_port, self.auth_address, msg)<EOL>log.error(error_string, exc_info=True)<EOL>raise BindException(error_string)<EOL><DEDENT>
Create the authentication socket.
f2771:c0:m5
def start(self):
<EOL>log.debug('<STR_LIT>')<EOL>self.verify_cert()<EOL>self._create_skt()<EOL>log.debug('<STR_LIT>', AUTH_MAX_CONN)<EOL>self.socket.listen(AUTH_MAX_CONN)<EOL>thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),))<EOL>thread.start()<EOL>signal.signal(signal.SIGTERM, self._exit_gracefully)<EOL>self.__up = True<EOL>while self.__up:<EOL><INDENT>try:<EOL><INDENT>(clientsocket, address) = self.socket.accept()<EOL>wrapped_auth_skt = ssl.wrap_socket(clientsocket,<EOL>server_side=True,<EOL>certfile=self.certificate,<EOL>keyfile=self.keyfile)<EOL><DEDENT>except ssl.SSLError:<EOL><INDENT>log.exception('<STR_LIT>', exc_info=True)<EOL>continue<EOL><DEDENT>except socket.error as error:<EOL><INDENT>if self.__up is False:<EOL><INDENT>return<EOL><DEDENT>else:<EOL><INDENT>msg = '<STR_LIT>'.format(error)<EOL>log.error(msg, exc_info=True)<EOL>raise NapalmLogsExit(msg)<EOL><DEDENT><DEDENT>log.info('<STR_LIT>', address)<EOL>log.debug('<STR_LIT>')<EOL>client_thread = threading.Thread(target=self._handshake,<EOL>args=(wrapped_auth_skt, address))<EOL>client_thread.start()<EOL><DEDENT>
Listen to auth requests and send the AES key. Each client connection starts a new thread.
f2771:c0:m6
def stop(self):
log.info('<STR_LIT>')<EOL>self.__up = False<EOL>self.socket.close()<EOL>
Stop the auth proc.
f2771:c0:m7
def _setup_listener(self):
listener_class = get_listener(self._listener_type)<EOL>self.address = self.listener_opts.pop('<STR_LIT:address>', self.address)<EOL>self.port = self.listener_opts.pop('<STR_LIT:port>', self.port)<EOL>self.listener = listener_class(self.address,<EOL>self.port,<EOL>**self.listener_opts)<EOL>
Setup the listener.
f2772:c0:m2
def _setup_ipc(self):
log.debug('<STR_LIT>')<EOL>self.ctx = zmq.Context()<EOL>self.pub = self.ctx.socket(zmq.PUSH)<EOL>self.pub.connect(LST_IPC_URL)<EOL>log.debug('<STR_LIT>', self.opts['<STR_LIT>'])<EOL>try:<EOL><INDENT>self.pub.setsockopt(zmq.HWM, self.opts['<STR_LIT>'])<EOL><DEDENT>except AttributeError:<EOL><INDENT>self.pub.setsockopt(zmq.SNDHWM, self.opts['<STR_LIT>'])<EOL><DEDENT>
Setup the listener IPC pusher.
f2772:c0:m3
def start(self):
<EOL>c_logs_ingested = Counter(<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>['<STR_LIT>', '<STR_LIT:address>', '<STR_LIT:port>'],<EOL>)<EOL>c_messages_published = Counter(<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>['<STR_LIT>', '<STR_LIT:address>', '<STR_LIT:port>'],<EOL>)<EOL>self._setup_ipc()<EOL>log.debug('<STR_LIT>', self._listener_type)<EOL>self._setup_listener()<EOL>self.listener.start()<EOL>thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),))<EOL>thread.start()<EOL>signal.signal(signal.SIGTERM, self._exit_gracefully)<EOL>self.__up = True<EOL>while self.__up:<EOL><INDENT>try:<EOL><INDENT>log_message, log_source = self.listener.receive()<EOL><DEDENT>except ListenerException as lerr:<EOL><INDENT>if self.__up is False:<EOL><INDENT>log.info('<STR_LIT>')<EOL>return<EOL><DEDENT>else:<EOL><INDENT>log.error(lerr, exc_info=True)<EOL>raise NapalmLogsExit(lerr)<EOL><DEDENT><DEDENT>log.debug('<STR_LIT>', log_message, log_source)<EOL>if not log_message:<EOL><INDENT>log.info('<STR_LIT>', log_source)<EOL>continue<EOL><DEDENT>c_logs_ingested.labels(listener_type=self._listener_type, address=self.address, port=self.port).inc()<EOL>self.pub.send(umsgpack.packb((log_message, log_source)))<EOL>c_messages_published.labels(listener_type=self._listener_type, address=self.address, port=self.port).inc()<EOL><DEDENT>
Listen to messages and publish them.
f2772:c0:m4
def _setup_ipc(self):
log.debug('<STR_LIT>')<EOL>self.ctx = zmq.Context()<EOL>self.sub = self.ctx.socket(zmq.PULL)<EOL>self.sub.bind(LST_IPC_URL)<EOL>try:<EOL><INDENT>self.sub.setsockopt(zmq.HWM, self.opts['<STR_LIT>'])<EOL><DEDENT>except AttributeError:<EOL><INDENT>self.sub.setsockopt(zmq.RCVHWM, self.opts['<STR_LIT>'])<EOL><DEDENT>log.debug('<STR_LIT>')<EOL>self.pub = self.ctx.socket(zmq.ROUTER)<EOL>self.pub.bind(DEV_IPC_URL)<EOL>try:<EOL><INDENT>self.pub.setsockopt(zmq.HWM, self.opts['<STR_LIT>'])<EOL><DEDENT>except AttributeError:<EOL><INDENT>self.pub.setsockopt(zmq.SNDHWM, self.opts['<STR_LIT>'])<EOL><DEDENT>self.publisher_pub = self.ctx.socket(zmq.PUB)<EOL>self.publisher_pub.connect(PUB_PX_IPC_URL)<EOL>try:<EOL><INDENT>self.publisher_pub.setsockopt(zmq.HWM, self.opts['<STR_LIT>'])<EOL><DEDENT>except AttributeError:<EOL><INDENT>self.publisher_pub.setsockopt(zmq.SNDHWM, self.opts['<STR_LIT>'])<EOL><DEDENT>
Setup the IPC pub and sub. Subscript to the listener IPC and publish to the device specific IPC.
f2773:c0:m2
def _cleanup_buffer(self):
if not self._buffer:<EOL><INDENT>return<EOL><DEDENT>while True:<EOL><INDENT>time.sleep(<NUM_LIT>)<EOL>log.debug('<STR_LIT>')<EOL>items = self._buffer.items()<EOL>log.debug('<STR_LIT>')<EOL>log.debug(list(items))<EOL><DEDENT>
Periodically cleanup the buffer.
f2773:c0:m3
def _compile_prefixes(self):
self.compiled_prefixes = {}<EOL>for dev_os, os_config in self.config.items():<EOL><INDENT>if not os_config:<EOL><INDENT>continue<EOL><DEDENT>self.compiled_prefixes[dev_os] = []<EOL>for prefix in os_config.get('<STR_LIT>', []):<EOL><INDENT>values = prefix.get('<STR_LIT>', {})<EOL>line = prefix.get('<STR_LIT>', '<STR_LIT>')<EOL>if prefix.get('<STR_LIT>'):<EOL><INDENT>self.compiled_prefixes[dev_os].append({<EOL>'<STR_LIT>': prefix['<STR_LIT>'],<EOL>'<STR_LIT>': prefix['<STR_LIT>']<EOL>})<EOL>continue <EOL><DEDENT>line = '<STR_LIT>'.format(line)<EOL>values['<STR_LIT>'] = r'<STR_LIT>'<EOL>values['<STR_LIT:message>'] = '<STR_LIT>'<EOL>position = {}<EOL>for key in values.keys():<EOL><INDENT>position[line.find('<STR_LIT:{>' + key + '<STR_LIT:}>')] = key<EOL><DEDENT>sorted_position = {}<EOL>for i, elem in enumerate(sorted(position.items())):<EOL><INDENT>sorted_position[elem[<NUM_LIT:1>]] = i + <NUM_LIT:1><EOL><DEDENT>escaped = re.escape(line).replace(r'<STR_LIT>', '<STR_LIT:{>').replace(r'<STR_LIT>', '<STR_LIT:}>')<EOL>escaped = escaped.replace(r'<STR_LIT>', r'<STR_LIT>')<EOL>self.compiled_prefixes[dev_os].append({<EOL>'<STR_LIT>': re.compile(escaped.format(**values)),<EOL>'<STR_LIT>': sorted_position,<EOL>'<STR_LIT>': escaped.format(**values),<EOL>'<STR_LIT>': values<EOL>})<EOL><DEDENT><DEDENT>
Create a dict of all OS prefixes and their compiled regexes
f2773:c0:m4
def _identify_prefix(self, msg, data):
prefix_id = -<NUM_LIT:1><EOL>for prefix in data:<EOL><INDENT>msg_dict = {}<EOL>prefix_id += <NUM_LIT:1><EOL>match = None<EOL>if '<STR_LIT>' in prefix:<EOL><INDENT>log.debug('<STR_LIT>', prefix['<STR_LIT>'])<EOL>try:<EOL><INDENT>match = prefix['<STR_LIT>'](msg)<EOL><DEDENT>except Exception:<EOL><INDENT>log.error('<STR_LIT>',<EOL>msg, prefix['<STR_LIT>'], exc_info=True)<EOL><DEDENT><DEDENT>else:<EOL><INDENT>log.debug('<STR_LIT>')<EOL>log.debug(prefix['<STR_LIT>'])<EOL>match = prefix['<STR_LIT>'].search(msg)<EOL><DEDENT>if not match:<EOL><INDENT>log.debug('<STR_LIT>')<EOL>continue<EOL><DEDENT>if '<STR_LIT>' in prefix:<EOL><INDENT>log.debug('<STR_LIT>', msg, prefix['<STR_LIT>'])<EOL>msg_dict = match <EOL><DEDENT>else:<EOL><INDENT>positions = prefix.get('<STR_LIT>', {})<EOL>values = prefix.get('<STR_LIT>')<EOL>msg_dict = {}<EOL>for key in values.keys():<EOL><INDENT>msg_dict[key] = match.group(positions.get(key))<EOL><DEDENT><DEDENT>msg_dict['<STR_LIT>'] = prefix_id<EOL>msg_dict['<STR_LIT:message>'] = msg_dict['<STR_LIT:message>'].strip()<EOL>if '<STR_LIT>' in msg_dict:<EOL><INDENT>msg_dict['<STR_LIT>'] = int(int(msg_dict['<STR_LIT>']) / <NUM_LIT:8>)<EOL>msg_dict['<STR_LIT>'] = int(int(msg_dict['<STR_LIT>']) - (msg_dict['<STR_LIT>'] * <NUM_LIT:8>))<EOL><DEDENT>return msg_dict<EOL><DEDENT>
Check the message against each OS prefix and, if matched, return the message dict
f2773:c0:m5
def _identify_os(self, msg):
ret = []<EOL>for dev_os, data in self.compiled_prefixes.items():<EOL><INDENT>log.debug('<STR_LIT>', dev_os)<EOL>msg_dict = self._identify_prefix(msg, data)<EOL>if msg_dict:<EOL><INDENT>log.debug('<STR_LIT>', dev_os)<EOL>ret.append((dev_os, msg_dict))<EOL><DEDENT>else:<EOL><INDENT>log.debug('<STR_LIT>', dev_os)<EOL><DEDENT><DEDENT>if not ret:<EOL><INDENT>log.debug('<STR_LIT>')<EOL>msg_dict = {'<STR_LIT:message>': msg}<EOL>ret.append((None, msg_dict))<EOL><DEDENT>return ret<EOL>
Using the prefix of the syslog message, we are able to identify the operating system and then continue parsing.
f2773:c0:m6
def start(self):
<EOL>napalm_logs_server_messages_received = Counter(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>"<EOL>)<EOL>napalm_logs_server_skipped_buffered_messages = Counter(<EOL>'<STR_LIT>',<EOL>'<STR_LIT>',<EOL>['<STR_LIT>']<EOL>)<EOL>napalm_logs_server_messages_with_identified_os = Counter(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>",<EOL>['<STR_LIT>']<EOL>)<EOL>napalm_logs_server_messages_without_identified_os = Counter(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>"<EOL>)<EOL>napalm_logs_server_messages_failed_device_queuing = Counter(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>",<EOL>['<STR_LIT>']<EOL>)<EOL>napalm_logs_server_messages_device_queued = Counter(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>",<EOL>['<STR_LIT>']<EOL>)<EOL>napalm_logs_server_messages_unknown_queued = Counter(<EOL>"<STR_LIT>",<EOL>"<STR_LIT>"<EOL>)<EOL>self._setup_ipc()<EOL>cleanup = threading.Thread(target=self._cleanup_buffer)<EOL>cleanup.start()<EOL>thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),))<EOL>thread.start()<EOL>signal.signal(signal.SIGTERM, self._exit_gracefully)<EOL>self.__up = True<EOL>while self.__up:<EOL><INDENT>try:<EOL><INDENT>bin_obj = self.sub.recv()<EOL>msg, address = umsgpack.unpackb(bin_obj, use_list=False)<EOL><DEDENT>except zmq.ZMQError as error:<EOL><INDENT>if self.__up is False:<EOL><INDENT>log.info('<STR_LIT>')<EOL>return<EOL><DEDENT>else:<EOL><INDENT>log.error(error, exc_info=True)<EOL>raise NapalmLogsExit(error)<EOL><DEDENT><DEDENT>if six.PY3:<EOL><INDENT>msg = str(msg, '<STR_LIT:utf-8>')<EOL><DEDENT>else:<EOL><INDENT>msg = msg.encode('<STR_LIT:utf-8>')<EOL><DEDENT>log.debug('<STR_LIT>', address, msg, time.time())<EOL>napalm_logs_server_messages_received.inc()<EOL>os_list = self._identify_os(msg)<EOL>for dev_os, msg_dict in os_list:<EOL><INDENT>if dev_os and dev_os in self.started_os_proc:<EOL><INDENT>log.debug('<STR_LIT>', dev_os)<EOL>log.debug('<STR_LIT>', dev_os)<EOL>if six.PY3:<EOL><INDENT>dev_os = bytes(dev_os, '<STR_LIT:utf-8>')<EOL><DEDENT>if self._buffer:<EOL><INDENT>message = '<STR_LIT>'.format(dev_os=dev_os,<EOL>host=msg_dict['<STR_LIT:host>'],<EOL>msg=msg_dict['<STR_LIT:message>'])<EOL>message_key = base64.b64encode(message)<EOL>if self._buffer[message_key]:<EOL><INDENT>log.info('<STR_LIT>', msg_dict['<STR_LIT:message>'])<EOL>napalm_logs_server_skipped_buffered_messages.labels(device_os=dev_os).inc()<EOL>continue<EOL><DEDENT>log.debug('<STR_LIT>', msg_dict['<STR_LIT:message>'])<EOL>self._buffer[message_key] = <NUM_LIT:1><EOL><DEDENT>self.pub.send_multipart([dev_os,<EOL>umsgpack.packb((msg_dict, address))])<EOL>napalm_logs_server_messages_with_identified_os.labels(device_os=dev_os).inc()<EOL>napalm_logs_server_messages_device_queued.labels(device_os=dev_os).inc()<EOL><DEDENT>elif dev_os and dev_os not in self.started_os_proc:<EOL><INDENT>log.info('<STR_LIT>', dev_os)<EOL>napalm_logs_server_messages_with_identified_os.labels(device_os=dev_os).inc()<EOL>napalm_logs_server_messages_failed_device_queuing.labels(device_os=dev_os).inc()<EOL><DEDENT>elif not dev_os and self.opts['<STR_LIT>']:<EOL><INDENT>log.debug('<STR_LIT>')<EOL>to_publish = {<EOL>'<STR_LIT>': address,<EOL>'<STR_LIT:host>': '<STR_LIT>',<EOL>'<STR_LIT>': int(time.time()),<EOL>'<STR_LIT>': msg_dict,<EOL>'<STR_LIT>': UNKNOWN_DEVICE_NAME,<EOL>'<STR_LIT:error>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>'<EOL>}<EOL>self.publisher_pub.send(umsgpack.packb(to_publish))<EOL>napalm_logs_server_messages_unknown_queued.inc()<EOL>napalm_logs_server_messages_without_identified_os.inc()<EOL><DEDENT><DEDENT><DEDENT>
Take the messages from the queue, inspect and identify the operating system, then queue the message correspondingly.
f2773:c0:m7
def bgp_state_convert(state):
state_dict = {'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>'}<EOL>return state_dict.get(state, state.upper())<EOL>
Given a matched BGP state, map it to a vendor agnostic version.
f2774:m2
def bfd_state_convert(state):
state_dict = {'<STR_LIT>': '<STR_LIT>'}<EOL>return state_dict.get(state, state.upper())<EOL>
Given a matched BFD state, map it to a vendor agnostic version.
f2774:m3
def unserialize(binary):
return umsgpack.unpackb(binary)<EOL>
Unpack the original OpenConfig object, serialized using MessagePack. This is to be used when disable_security is set.
f2774:m4
def setval(key, val, dict_=None, delim=defaults.DEFAULT_DELIM):
if not dict_:<EOL><INDENT>dict_ = {}<EOL><DEDENT>prev_hier = dict_<EOL>dict_hier = key.split(delim)<EOL>for each in dict_hier[:-<NUM_LIT:1>]:<EOL><INDENT>if isinstance(each, six.string_types):<EOL><INDENT>if each not in prev_hier:<EOL><INDENT>prev_hier[each] = {}<EOL><DEDENT>prev_hier = prev_hier[each]<EOL><DEDENT>else:<EOL><INDENT>prev_hier[each] = [{}]<EOL>prev_hier = prev_hier[each]<EOL><DEDENT><DEDENT>prev_hier[dict_hier[-<NUM_LIT:1>]] = val<EOL>return dict_<EOL>
Set a value under the dictionary hierarchy identified under the key. The target 'foo/bar/baz' returns the dictionary hierarchy {'foo': {'bar': {'baz': {}}}}. .. note:: Currently this doesn't work with integers, i.e. cannot build lists dynamically. TODO
f2774:m6
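A simplified usage sketch of building the nested hierarchy from a delimited key (the list-building branch of the original is omitted, and the delimiter is assumed to be '/'):

.. code-block:: python

    def setval(key, val, dict_=None, delim='/'):
        # Build the nested dictionary hierarchy for a 'foo/bar/baz' style key.
        dict_ = dict_ if dict_ is not None else {}
        prev = dict_
        parts = key.split(delim)
        for each in parts[:-1]:
            prev = prev.setdefault(each, {})
        prev[parts[-1]] = val
        return dict_

    doc = {}
    setval('users/user/luke/uid', 0, dict_=doc)
    setval('users/user/luke/action/login', True, dict_=doc)
    print(doc)
    # {'users': {'user': {'luke': {'uid': 0, 'action': {'login': True}}}}}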
def traverse(data, key, delim=defaults.DEFAULT_DELIM):
for each in key.split(delim):<EOL><INDENT>if isinstance(data, list):<EOL><INDENT>if isinstance(each, six.string_types):<EOL><INDENT>embed_match = False<EOL>for embedded in (x for x in data if isinstance(x, dict)):<EOL><INDENT>try:<EOL><INDENT>data = embedded[each]<EOL>embed_match = True<EOL>break<EOL><DEDENT>except KeyError:<EOL><INDENT>pass<EOL><DEDENT><DEDENT>if not embed_match:<EOL><INDENT>return None<EOL><DEDENT><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>data = data[int(each)]<EOL><DEDENT>except IndexError:<EOL><INDENT>return None<EOL><DEDENT><DEDENT><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>data = data[each]<EOL><DEDENT>except (KeyError, TypeError):<EOL><INDENT>return None<EOL><DEDENT><DEDENT><DEDENT>return data<EOL>
Traverse a dict or list using a slash delimiter target string. The target 'foo/bar/0' will return data['foo']['bar'][0] if this value exists, otherwise will return empty dict. Return None when not found. This can be used to verify if a certain key exists under dictionary hierarchy.
f2774:m7
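A simplified traversal sketch (the branch that searches dicts embedded in lists is omitted); it walks dicts by key and lists by integer index, returning None when the path does not exist:

.. code-block:: python

    def traverse(data, key, delim='/'):
        # Walk dicts by key and lists by integer index; None means the path is missing.
        for each in key.split(delim):
            if isinstance(data, list):
                try:
                    data = data[int(each)]
                except (IndexError, ValueError):
                    return None
            else:
                try:
                    data = data[each]
                except (KeyError, TypeError):
                    return None
        return data

    doc = {'foo': {'bar': [{'baz': 42}]}}
    print(traverse(doc, 'foo/bar/0/baz'))   # 42
    print(traverse(doc, 'foo/missing'))     # None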
def dictupdate(dest, upd):
recursive_update = True<EOL>if (not isinstance(dest, collections.Mapping)) or (not isinstance(upd, collections.Mapping)):<EOL><INDENT>raise TypeError('<STR_LIT>')<EOL><DEDENT>updkeys = list(upd.keys())<EOL>if not set(list(dest.keys())) & set(updkeys):<EOL><INDENT>recursive_update = False<EOL><DEDENT>if recursive_update:<EOL><INDENT>for key in updkeys:<EOL><INDENT>val = upd[key]<EOL>try:<EOL><INDENT>dest_subkey = dest.get(key, None)<EOL><DEDENT>except AttributeError:<EOL><INDENT>dest_subkey = None<EOL><DEDENT>if isinstance(dest_subkey, collections.Mapping) and isinstance(val, collections.Mapping):<EOL><INDENT>ret = dictupdate(dest_subkey, val)<EOL>dest[key] = ret<EOL><DEDENT>elif isinstance(dest_subkey, list) and isinstance(val, list):<EOL><INDENT>merged = copy.deepcopy(dest_subkey)<EOL>merged.extend([x for x in val if x not in merged])<EOL>dest[key] = merged<EOL><DEDENT>else:<EOL><INDENT>dest[key] = upd[key]<EOL><DEDENT><DEDENT>return dest<EOL><DEDENT>else:<EOL><INDENT>try:<EOL><INDENT>for k in upd:<EOL><INDENT>dest[k] = upd[k]<EOL><DEDENT><DEDENT>except AttributeError:<EOL><INDENT>for k in upd:<EOL><INDENT>dest[k] = upd[k]<EOL><DEDENT><DEDENT>return dest<EOL><DEDENT>
Recursive version of the default dict.update. Merges upd recursively into dest.
f2774:m8
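A compact usage sketch of the recursive merge semantics (nested dicts merged, lists extended with new items, scalars overwritten); the fast non-recursive path of the original is omitted:

.. code-block:: python

    import copy

    def dictupdate(dest, upd):
        # Merge upd into dest recursively: nested dicts are merged, lists are
        # extended with items they do not already contain, everything else is overwritten.
        for key, val in upd.items():
            cur = dest.get(key)
            if isinstance(cur, dict) and isinstance(val, dict):
                dictupdate(cur, val)
            elif isinstance(cur, list) and isinstance(val, list):
                merged = copy.deepcopy(cur)
                merged.extend(x for x in val if x not in merged)
                dest[key] = merged
            else:
                dest[key] = val
        return dest

    base = {'messages': [{'error': 'INTERFACE_DOWN'}], 'prefixes': {'junos': 1}}
    extra = {'messages': [{'error': 'BGP_NEIGHBOR_DOWN'}], 'prefixes': {'eos': 2}}
    print(dictupdate(base, extra))
    # {'messages': [{'error': 'INTERFACE_DOWN'}, {'error': 'BGP_NEIGHBOR_DOWN'}],
    #  'prefixes': {'junos': 1, 'eos': 2}}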
def _start_keep_alive(self):
keep_alive_thread = threading.Thread(target=self.keep_alive)<EOL>keep_alive_thread.daemon = True<EOL>keep_alive_thread.start()<EOL>
Start the keep alive thread as a daemon
f2774:c0:m1
def keep_alive(self):
self.ssl_skt.settimeout(defaults.AUTH_KEEP_ALIVE_INTERVAL)<EOL>while self.__up:<EOL><INDENT>try:<EOL><INDENT>log.debug('<STR_LIT>')<EOL>self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE)<EOL><DEDENT>except socket.error:<EOL><INDENT>log.error('<STR_LIT>')<EOL>log.error('<STR_LIT>')<EOL>self.reconnect()<EOL>log.debug('<STR_LIT>')<EOL>self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE)<EOL><DEDENT>msg = self.ssl_skt.recv(len(defaults.AUTH_KEEP_ALIVE_ACK))<EOL>log.debug('<STR_LIT>', msg)<EOL>if msg != defaults.AUTH_KEEP_ALIVE_ACK:<EOL><INDENT>log.error('<STR_LIT>',<EOL>msg, defaults.AUTH_KEEP_ALIVE_ACK)<EOL>log.error('<STR_LIT>')<EOL>self.reconnect()<EOL><DEDENT>time.sleep(defaults.AUTH_KEEP_ALIVE_INTERVAL)<EOL><DEDENT>
Send a keep alive request periodically to make sure that the server is still alive. If not then try to reconnect.
f2774:c0:m2
def reconnect(self):
log.debug('<STR_LIT>')<EOL>try:<EOL><INDENT>self.ssl_skt.close()<EOL><DEDENT>except socket.error:<EOL><INDENT>log.error('<STR_LIT>')<EOL><DEDENT>log.debug('<STR_LIT>')<EOL>self.authenticate()<EOL>
Try to reconnect and re-authenticate with the server.
f2774:c0:m3
def authenticate(self):
log.debug('<STR_LIT>',<EOL>self.address, self.port, self.certificate)<EOL>if '<STR_LIT::>' in self.address:<EOL><INDENT>skt_ver = socket.AF_INET6<EOL><DEDENT>else:<EOL><INDENT>skt_ver = socket.AF_INET<EOL><DEDENT>skt = socket.socket(skt_ver, socket.SOCK_STREAM)<EOL>self.ssl_skt = ssl.wrap_socket(skt,<EOL>ca_certs=self.certificate,<EOL>cert_reqs=ssl.CERT_REQUIRED)<EOL>try:<EOL><INDENT>self.ssl_skt.connect((self.address, self.port))<EOL>self.auth_try_id = <NUM_LIT:0><EOL><DEDENT>except socket.error as err:<EOL><INDENT>log.error('<STR_LIT>')<EOL>self.auth_try_id += <NUM_LIT:1><EOL>if not self.max_try or self.auth_try_id < self.max_try:<EOL><INDENT>log.error('<STR_LIT>', self.timeout)<EOL>time.sleep(self.timeout)<EOL>self.authenticate()<EOL><DEDENT>log.critical('<STR_LIT>',<EOL>self.address, self.port, self.certificate)<EOL>raise ClientConnectException(err)<EOL><DEDENT>self.ssl_skt.write(defaults.MAGIC_REQ)<EOL>private_key = self.ssl_skt.recv(defaults.BUFFER_SIZE)<EOL>self.ssl_skt.write(defaults.MAGIC_ACK)<EOL>verify_key_hex = self.ssl_skt.recv(defaults.BUFFER_SIZE)<EOL>self.ssl_skt.write(defaults.MAGIC_ACK)<EOL>self.priv_key = nacl.secret.SecretBox(private_key)<EOL>self.verify_key = nacl.signing.VerifyKey(verify_key_hex, encoder=nacl.encoding.HexEncoder)<EOL>
Authenticate the client and return the private and signature keys. Establish a connection through a secured socket, then do the handshake using the napalm-logs auth algorithm.
f2774:c0:m4
def decrypt(self, binary):
try:<EOL><INDENT>encrypted = self.verify_key.verify(binary)<EOL><DEDENT>except BadSignatureError:<EOL><INDENT>log.error('<STR_LIT>', exc_info=True)<EOL>raise BadSignatureException('<STR_LIT>')<EOL><DEDENT>try:<EOL><INDENT>packed = self.priv_key.decrypt(encrypted)<EOL><DEDENT>except CryptoError:<EOL><INDENT>log.error('<STR_LIT>', exc_info=True)<EOL>raise CryptoException('<STR_LIT>')<EOL><DEDENT>return umsgpack.unpackb(packed)<EOL>
Decrypt and unpack the original OpenConfig object, serialized using MessagePack. Raise BadSignatureException when the signature was forged or corrupted.
f2774:c0:m5
def stop(self):
self.__up = False<EOL>self.ssl_skt.close()<EOL>
Stop the client.
f2774:c0:m6
def get_serializer(name):
try:<EOL><INDENT>log.debug('<STR_LIT>', name)<EOL>return SERIALIZER_LOOKUP[name]<EOL><DEDENT>except KeyError:<EOL><INDENT>msg = '<STR_LIT>'.format(name)<EOL>log.error(msg, exc_info=True)<EOL>raise InvalidSerializerException(msg)<EOL><DEDENT>
Return the serialize function.
f2776:m0
def start(self):
log.debug('<STR_LIT>',<EOL>self.bootstrap_servers,<EOL>self.group_id)<EOL>try:<EOL><INDENT>self.consumer = kafka.KafkaConsumer(bootstrap_servers=self.bootstrap_servers,<EOL>group_id=self.group_id)<EOL><DEDENT>except kafka.errors.NoBrokersAvailable as err:<EOL><INDENT>log.error(err, exc_info=True)<EOL>raise ListenerException(err)<EOL><DEDENT>log.debug('<STR_LIT>', self.topic)<EOL>self.consumer.subscribe(topics=[self.topic])<EOL>
Startup the kafka consumer.
f2777:c0:m1
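A minimal consumption sketch assuming the listener wraps kafka-python, as the body suggests; the broker address, group id and topic name are hypothetical:

.. code-block:: python

    import json

    import kafka

    consumer = kafka.KafkaConsumer(bootstrap_servers='localhost:9092',
                                   group_id='napalm-logs')
    consumer.subscribe(topics=['syslog.net'])
    for msg in consumer:
        try:
            decoded = json.loads(msg.value.decode('utf-8'))
        except ValueError:
            continue  # skip messages that are not valid JSON
        print(decoded.get('message'), msg.key)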
def receive(self):
try:<EOL><INDENT>msg = next(self.consumer)<EOL><DEDENT>except ValueError as error:<EOL><INDENT>log.error('<STR_LIT>', error, exc_info=True)<EOL>raise ListenerException(error)<EOL><DEDENT>log_source = msg.key<EOL>try:<EOL><INDENT>decoded = json.loads(msg.value.decode('<STR_LIT:utf-8>'))<EOL><DEDENT>except ValueError:<EOL><INDENT>log.error('<STR_LIT>', msg.value.decode('<STR_LIT:utf-8>'))<EOL>return '<STR_LIT>', '<STR_LIT>'<EOL><DEDENT>log_message = decoded.get('<STR_LIT:message>')<EOL>log.debug('<STR_LIT>', log_message, log_source, time.time())<EOL>return log_message, log_source<EOL>
Return the message received and the address.
f2777:c0:m2
def stop(self):
log.info('<STR_LIT>')<EOL>self.consumer.unsubscribe()<EOL>self.consumer.close()<EOL>
Shutdown kafka consumer.
f2777:c0:m3
def start(self):
pass<EOL>
Starts the listener.
f2778:c0:m1
def receive(self):
pass<EOL>
Return an object read from the source, and the location identification object.
f2778:c0:m2
def stop(self):
pass<EOL>
Shuts down the listener.
f2778:c0:m3
def _client_connection(self, conn, addr):
log.debug('<STR_LIT>', addr[<NUM_LIT:0>], addr[<NUM_LIT:1>])<EOL>conn.settimeout(self.socket_timeout)<EOL>try:<EOL><INDENT>while self.__up:<EOL><INDENT>msg = conn.recv(self.buffer_size)<EOL>if not msg:<EOL><INDENT>continue<EOL><DEDENT>log.debug('<STR_LIT>', time.time(), msg, addr)<EOL>self.buffer.put((msg, '<STR_LIT>'.format(addr[<NUM_LIT:0>], addr[<NUM_LIT:1>])))<EOL><DEDENT><DEDENT>except socket.timeout:<EOL><INDENT>if not self.__up:<EOL><INDENT>return<EOL><DEDENT>log.debug('<STR_LIT>', addr[<NUM_LIT:1>], addr[<NUM_LIT:0>])<EOL>raise ListenerException('<STR_LIT>' % addr)<EOL><DEDENT>finally:<EOL><INDENT>log.debug('<STR_LIT>', addr)<EOL>conn.close()<EOL><DEDENT>
Handle the connection with one client.
f2779:c0:m1
def _serve_clients(self):
self.__up = True<EOL>while self.__up:<EOL><INDENT>log.debug('<STR_LIT>')<EOL>try:<EOL><INDENT>conn, addr = self.skt.accept()<EOL>log.debug('<STR_LIT>', addr[<NUM_LIT:0>], addr[<NUM_LIT:1>])<EOL><DEDENT>except socket.error as error:<EOL><INDENT>if not self.__up:<EOL><INDENT>return<EOL><DEDENT>msg = '<STR_LIT>'.format(error)<EOL>log.error(msg, exc_info=True)<EOL>raise ListenerException(msg)<EOL><DEDENT>client_thread = threading.Thread(target=self._client_connection, args=(conn, addr,))<EOL>client_thread.start()<EOL><DEDENT>
Accept clients and serve them, one separate thread per client.
f2779:c0:m2
def start(self):
log.debug('<STR_LIT>')<EOL>if '<STR_LIT::>' in self.address:<EOL><INDENT>self.skt = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)<EOL><DEDENT>else:<EOL><INDENT>self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)<EOL><DEDENT>if self.reuse_port:<EOL><INDENT>self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, <NUM_LIT:1>)<EOL>if hasattr(socket, '<STR_LIT>'):<EOL><INDENT>self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, <NUM_LIT:1>)<EOL><DEDENT>else:<EOL><INDENT>log.error('<STR_LIT>')<EOL><DEDENT><DEDENT>try:<EOL><INDENT>self.skt.bind((self.address, int(self.port)))<EOL><DEDENT>except socket.error as msg:<EOL><INDENT>error_string = '<STR_LIT>'.format(self.port, self.address, msg)<EOL>log.error(error_string, exc_info=True)<EOL>raise BindException(error_string)<EOL><DEDENT>log.debug('<STR_LIT>', self.max_clients)<EOL>self.skt.listen(self.max_clients)<EOL>self.thread_serve = threading.Thread(target=self._serve_clients)<EOL>self.thread_serve.start()<EOL>
Start listening for messages.
f2779:c0:m3
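The three TCP records above (per-client handler, accept loop and start) combine into the following self-contained sketch; the address, port, buffer size and timeout are illustrative values only:

    import queue
    import socket
    import threading

    ADDRESS, PORT = '0.0.0.0', 1514                  # illustrative values
    BUFFER_SIZE, TIMEOUT, MAX_CLIENTS = 1024, 60, 5
    buffer = queue.Queue()

    def client_connection(conn, addr):
        # Read from a single client until it disconnects, queueing every
        # payload together with its source, as in the handler record above.
        conn.settimeout(TIMEOUT)
        try:
            while True:
                msg = conn.recv(BUFFER_SIZE)
                if not msg:
                    break
                buffer.put((msg, '{}:{}'.format(addr[0], addr[1])))
        finally:
            conn.close()

    def serve_clients(skt):
        # Accept clients forever, one handler thread per connection.
        while True:
            conn, addr = skt.accept()
            threading.Thread(target=client_connection, args=(conn, addr)).start()

    skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    if hasattr(socket, 'SO_REUSEPORT'):
        skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
    skt.bind((ADDRESS, PORT))
    skt.listen(MAX_CLIENTS)
    threading.Thread(target=serve_clients, args=(skt,), daemon=True).start()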
def receive(self):
while self.buffer.empty() and self.__up:<EOL><INDENT>sleep_ms = random.randint(<NUM_LIT:0>, <NUM_LIT:1000>)<EOL>time.sleep(sleep_ms / <NUM_LIT>)<EOL><DEDENT>if not self.buffer.empty():<EOL><INDENT>return self.buffer.get(block=False)<EOL><DEDENT>return '<STR_LIT>', '<STR_LIT>'<EOL>
Return one message dequeued from the listen buffer.
f2779:c0:m4
def stop(self):
log.info('<STR_LIT>')<EOL>self.__up = False<EOL>try:<EOL><INDENT>self.skt.shutdown(socket.SHUT_RDWR)<EOL><DEDENT>except socket.error:<EOL><INDENT>log.error('<STR_LIT>', exc_info=True)<EOL><DEDENT>self.skt.close()<EOL>
Close the socket.
f2779:c0:m5
def get_listener(name):
try:<EOL><INDENT>log.debug('<STR_LIT>', name)<EOL>return LISTENER_LOOKUP[name]<EOL><DEDENT>except KeyError:<EOL><INDENT>msg = '<STR_LIT>'.format(name)<EOL>log.error(msg, exc_info=True)<EOL>raise InvalidListenerException(msg)<EOL><DEDENT>
Return the listener class.
f2780:m0
def start(self):
zmq_uri = '<STR_LIT>'.format(<EOL>protocol=self.protocol,<EOL>address=self.address,<EOL>port=self.port<EOL>) if self.port else '<STR_LIT>'.format(<EOL>protocol=self.protocol,<EOL>address=self.address<EOL>)<EOL>log.debug('<STR_LIT>', zmq_uri)<EOL>self.ctx = zmq.Context()<EOL>if hasattr(zmq, self.type):<EOL><INDENT>skt_type = getattr(zmq, self.type)<EOL><DEDENT>else:<EOL><INDENT>skt_type = zmq.PULL<EOL><DEDENT>self.sub = self.ctx.socket(skt_type)<EOL>self.sub.connect(zmq_uri)<EOL>if self.hwm is not None:<EOL><INDENT>try:<EOL><INDENT>self.sub.setsockopt(zmq.HWM, self.hwm)<EOL><DEDENT>except AttributeError:<EOL><INDENT>self.sub.setsockopt(zmq.RCVHWM, self.hwm)<EOL><DEDENT><DEDENT>if self.recvtimeout is not None:<EOL><INDENT>log.debug('<STR_LIT>', self.recvtimeout)<EOL>self.sub.setsockopt(zmq.RCVTIMEO, self.recvtimeout)<EOL><DEDENT>if self.keepalive is not None:<EOL><INDENT>log.debug('<STR_LIT>', self.keepalive)<EOL>self.sub.setsockopt(zmq.TCP_KEEPALIVE, self.keepalive)<EOL><DEDENT>if self.keepalive_idle is not None:<EOL><INDENT>log.debug('<STR_LIT>', self.keepalive_idle)<EOL>self.sub.setsockopt(zmq.TCP_KEEPALIVE_IDLE, self.keepalive_idle)<EOL><DEDENT>if self.keepalive_interval is not None:<EOL><INDENT>log.debug('<STR_LIT>', self.keepalive_interval)<EOL>self.sub.setsockopt(zmq.TCP_KEEPALIVE_INTVL, self.keepalive_interval)<EOL><DEDENT>
Start up the ZMQ consumer.
f2781:c0:m1
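A sketch of a PULL consumer configured with the same socket options (high-water mark, receive timeout and TCP keepalive); the endpoint URI and the option values are assumptions:

    import zmq

    ZMQ_URI = 'tcp://127.0.0.1:49017'   # assumed endpoint

    ctx = zmq.Context()
    sub = ctx.socket(zmq.PULL)
    sub.connect(ZMQ_URI)

    # Newer pyzmq releases drop the combined zmq.HWM constant in favour of
    # RCVHWM/SNDHWM, hence the AttributeError fallback mirrored from above.
    try:
        sub.setsockopt(zmq.HWM, 1000)
    except AttributeError:
        sub.setsockopt(zmq.RCVHWM, 1000)

    sub.setsockopt(zmq.RCVTIMEO, 5000)            # recv() fails after 5 seconds
    sub.setsockopt(zmq.TCP_KEEPALIVE, 1)          # enable TCP keepalive probes
    sub.setsockopt(zmq.TCP_KEEPALIVE_IDLE, 300)   # idle seconds before the first probe

    try:
        msg = sub.recv()
    except zmq.Again:
        msg = None   # no message arrived within the receive timeout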
def receive(self):
try:<EOL><INDENT>msg = self.sub.recv()<EOL><DEDENT>except zmq.Again as error:<EOL><INDENT>log.error('<STR_LIT>', error, exc_info=True)<EOL>raise ListenerException(error)<EOL><DEDENT>log.debug('<STR_LIT>', time.time(), msg)<EOL>return msg, '<STR_LIT>'<EOL>
Return the message received. .. note:: With ZMQ we are unable to retrieve the address the message came from.
f2781:c0:m2
def stop(self):
log.info('<STR_LIT>')<EOL>self.sub.close()<EOL>self.ctx.term()<EOL>
Shut down the ZMQ listener.
f2781:c0:m3
def start(self):
if '<STR_LIT::>' in self.address:<EOL><INDENT>self.skt = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)<EOL><DEDENT>else:<EOL><INDENT>self.skt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)<EOL><DEDENT>if self.reuse_port:<EOL><INDENT>self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, <NUM_LIT:1>)<EOL>if hasattr(socket, '<STR_LIT>'):<EOL><INDENT>self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, <NUM_LIT:1>)<EOL><DEDENT>else:<EOL><INDENT>log.error('<STR_LIT>')<EOL><DEDENT><DEDENT>try:<EOL><INDENT>self.skt.bind((self.address, int(self.port)))<EOL><DEDENT>except socket.error as msg:<EOL><INDENT>error_string = '<STR_LIT>'.format(self.port, self.address, msg)<EOL>log.error(error_string, exc_info=True)<EOL>raise BindException(error_string)<EOL><DEDENT>
Create the UDP listener socket.
f2782:c0:m1
def receive(self):
try:<EOL><INDENT>msg, addr = self.skt.recvfrom(self.buffer_size)<EOL><DEDENT>except socket.error as error:<EOL><INDENT>log.error('<STR_LIT>', error, exc_info=True)<EOL>raise ListenerException(error)<EOL><DEDENT>log.debug('<STR_LIT>', msg, addr, time.time())<EOL>return msg, addr[<NUM_LIT:0>]<EOL>
Return the message received and the address.
f2782:c0:m2
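The UDP listener reduces to a plain datagram socket; a short sketch with illustrative address, port and buffer size:

    import socket

    ADDRESS, PORT, BUFFER_SIZE = '0.0.0.0', 514, 1024   # illustrative values

    skt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    skt.bind((ADDRESS, PORT))

    # One datagram per syslog message; recvfrom() also returns the sender,
    # which the receive() record above reports as the message source.
    msg, addr = skt.recvfrom(BUFFER_SIZE)
    print(msg, addr[0])
    skt.close()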
def stop(self):
log.info('<STR_LIT>')<EOL>self.skt.close()<EOL>
Shut down the UDP listener.
f2782:c0:m3
def _setup_ipc(self):
self.ctx = zmq.Context()<EOL>log.debug('<STR_LIT>', self._name)<EOL>self.sub = self.ctx.socket(zmq.DEALER)<EOL>if six.PY2:<EOL><INDENT>self.sub.setsockopt(zmq.IDENTITY, self._name)<EOL><DEDENT>elif six.PY3:<EOL><INDENT>self.sub.setsockopt(zmq.IDENTITY, bytes(self._name, '<STR_LIT:utf-8>'))<EOL><DEDENT>try:<EOL><INDENT>self.sub.setsockopt(zmq.HWM, self.opts['<STR_LIT>'])<EOL><DEDENT>except AttributeError:<EOL><INDENT>self.sub.setsockopt(zmq.RCVHWM, self.opts['<STR_LIT>'])<EOL><DEDENT>self.sub.connect(DEV_IPC_URL)<EOL>self.pub = self.ctx.socket(zmq.PUB)<EOL>self.pub.connect(PUB_PX_IPC_URL)<EOL>try:<EOL><INDENT>self.pub.setsockopt(zmq.HWM, self.opts['<STR_LIT>'])<EOL><DEDENT>except AttributeError:<EOL><INDENT>self.pub.setsockopt(zmq.SNDHWM, self.opts['<STR_LIT>'])<EOL><DEDENT>
Subscribe to the right topic in the device IPC and publish to the publisher proxy.
f2783:c0:m2
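The IPC setup boils down to a DEALER socket whose identity is the OS name (bytes under Python 3) plus a PUB socket towards the publisher proxy; the IPC paths below are invented placeholders:

    import sys
    import zmq

    DEV_IPC_URL = 'ipc:///tmp/napalm-logs-dev.ipc'        # assumed path
    PUB_PX_IPC_URL = 'ipc:///tmp/napalm-logs-pub-px.ipc'  # assumed path
    name = 'junos'                                        # example OS name

    ctx = zmq.Context()
    sub = ctx.socket(zmq.DEALER)
    # The DEALER identity must be bytes on Python 3, hence the encode.
    identity = name if sys.version_info[0] == 2 else name.encode('utf-8')
    sub.setsockopt(zmq.IDENTITY, identity)
    sub.connect(DEV_IPC_URL)

    pub = ctx.socket(zmq.PUB)
    pub.connect(PUB_PX_IPC_URL)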
def _compile_messages(self):
self.compiled_messages = []<EOL>if not self._config:<EOL><INDENT>return<EOL><DEDENT>for message_dict in self._config.get('<STR_LIT>', {}):<EOL><INDENT>error = message_dict['<STR_LIT:error>']<EOL>tag = message_dict['<STR_LIT>']<EOL>model = message_dict['<STR_LIT>']<EOL>match_on = message_dict.get('<STR_LIT>', '<STR_LIT>')<EOL>if '<STR_LIT>' in message_dict:<EOL><INDENT>self.compiled_messages.append({<EOL>'<STR_LIT:error>': error,<EOL>'<STR_LIT>': tag,<EOL>'<STR_LIT>': match_on,<EOL>'<STR_LIT>': model,<EOL>'<STR_LIT>': message_dict['<STR_LIT>']<EOL>})<EOL>continue<EOL><DEDENT>values = message_dict['<STR_LIT>']<EOL>line = message_dict['<STR_LIT>']<EOL>mapping = message_dict['<STR_LIT>']<EOL>position = {}<EOL>replace = {}<EOL>for key in values.keys():<EOL><INDENT>if '<STR_LIT:|>' in key:<EOL><INDENT>new_key, replace[new_key] = key.replace('<STR_LIT:U+0020>', '<STR_LIT>').split('<STR_LIT:|>')<EOL>values[new_key] = values.pop(key)<EOL>key = new_key<EOL><DEDENT>position[line.find('<STR_LIT:{>' + key + '<STR_LIT:}>')] = key<EOL><DEDENT>sorted_position = {}<EOL>for i, elem in enumerate(sorted(position.items())):<EOL><INDENT>sorted_position[elem[<NUM_LIT:1>]] = i + <NUM_LIT:1><EOL><DEDENT>escaped = re.escape(line).replace(r'<STR_LIT>', '<STR_LIT:{>').replace(r'<STR_LIT>', '<STR_LIT:}>')<EOL>escaped = escaped.replace(r'<STR_LIT>', r'<STR_LIT>')<EOL>self.compiled_messages.append(<EOL>{<EOL>'<STR_LIT:error>': error,<EOL>'<STR_LIT>': tag,<EOL>'<STR_LIT>': match_on,<EOL>'<STR_LIT>': re.compile(escaped.format(**values)),<EOL>'<STR_LIT>': sorted_position,<EOL>'<STR_LIT>': values,<EOL>'<STR_LIT:replace>': replace,<EOL>'<STR_LIT>': model,<EOL>'<STR_LIT>': mapping<EOL>}<EOL>)<EOL><DEDENT>log.debug('<STR_LIT>')<EOL>log.debug(self.compiled_messages)<EOL>
Create a list of all OS messages and their compiled regexes.
f2783:c0:m3
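The compilation step takes a raw line template with {placeholders}, escapes its literal parts, substitutes the per-field patterns and records the order of the capture groups. A compressed sketch with an invented message format:

    import re

    # Hypothetical profile entry: a raw line template plus the regex for each field.
    line = '{user} logged in from {source}'
    values = {'user': r'(\w+)', 'source': r'([\d\.]+)'}

    # Remember where each placeholder occurs so group numbers map back to fields.
    position = {line.find('{' + key + '}'): key for key in values}
    sorted_position = {key: idx + 1
                       for idx, (_, key) in enumerate(sorted(position.items()))}

    # Escape the literal parts, restore the {placeholders}, then inject the patterns.
    escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}')
    pattern = re.compile(escaped.format(**values))

    match = pattern.search('admin logged in from 10.0.0.1')
    print({key: match.group(group) for key, group in sorted_position.items()})
    # {'user': 'admin', 'source': '10.0.0.1'}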
def _parse(self, msg_dict):
error_present = False<EOL>for message in self.compiled_messages:<EOL><INDENT>match_on = message['<STR_LIT>']<EOL>if match_on not in msg_dict:<EOL><INDENT>continue<EOL><DEDENT>if message['<STR_LIT>'] != msg_dict[match_on]:<EOL><INDENT>continue<EOL><DEDENT>if '<STR_LIT>' in message:<EOL><INDENT>return {<EOL>'<STR_LIT>': message['<STR_LIT>'],<EOL>'<STR_LIT:error>': message['<STR_LIT:error>'],<EOL>'<STR_LIT>': message['<STR_LIT>']<EOL>}<EOL><DEDENT>error_present = True<EOL>match = message['<STR_LIT>'].search(msg_dict['<STR_LIT:message>'])<EOL>if not match:<EOL><INDENT>continue<EOL><DEDENT>positions = message.get('<STR_LIT>', {})<EOL>values = message.get('<STR_LIT>')<EOL>ret = {<EOL>'<STR_LIT>': message['<STR_LIT>'],<EOL>'<STR_LIT>': message['<STR_LIT>'],<EOL>'<STR_LIT:replace>': message['<STR_LIT:replace>'],<EOL>'<STR_LIT:error>': message['<STR_LIT:error>']<EOL>}<EOL>for key in values.keys():<EOL><INDENT>if key in message['<STR_LIT:replace>']:<EOL><INDENT>result = napalm_logs.utils.cast(match.group(positions.get(key)), message['<STR_LIT:replace>'][key])<EOL><DEDENT>else:<EOL><INDENT>result = match.group(positions.get(key))<EOL><DEDENT>ret[key] = result<EOL><DEDENT>return ret<EOL><DEDENT>if error_present is True:<EOL><INDENT>log.info('<STR_LIT>', self._name, msg_dict.get('<STR_LIT>', '<STR_LIT>'))<EOL><DEDENT>else:<EOL><INDENT>log.info('<STR_LIT>', self._name, msg_dict.get('<STR_LIT>', '<STR_LIT>'))<EOL><DEDENT>
Parse a syslog message and check what OpenConfig object should be generated.
f2783:c0:m4
def _emit(self, **kwargs):
oc_dict = {}<EOL>for mapping, result_key in kwargs['<STR_LIT>']['<STR_LIT>'].items():<EOL><INDENT>result = kwargs[result_key]<EOL>oc_dict = napalm_logs.utils.setval(mapping.format(**kwargs), result, oc_dict)<EOL><DEDENT>for mapping, result in kwargs['<STR_LIT>']['<STR_LIT>'].items():<EOL><INDENT>oc_dict = napalm_logs.utils.setval(mapping.format(**kwargs), result, oc_dict)<EOL><DEDENT>return oc_dict<EOL>
Emit an OpenConfig object given a certain combination of fields, mapped in the config to the corresponding hierarchy.
f2783:c0:m5
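napalm_logs.utils.setval inserts a value at a nested path inside the OpenConfig dictionary. A simplified stand-in (the path separator, mapping key and parsed fields are all invented) shows how the mapping drives the output:

    def setval(path, value, dct, sep='//'):
        # Simplified stand-in for napalm_logs.utils.setval: create the nested
        # keys along `path` and assign `value` at the leaf.
        node = dct
        keys = path.split(sep)
        for key in keys[:-1]:
            node = node.setdefault(key, {})
        node[keys[-1]] = value
        return dct

    parsed = {'peer': '10.0.0.1', 'current_state': 'IDLE'}
    mapping = {'bgp//neighbors//neighbor//{peer}//state//session-state': 'current_state'}

    oc_dict = {}
    for path_tpl, result_key in mapping.items():
        oc_dict = setval(path_tpl.format(**parsed), parsed[result_key], oc_dict)
    print(oc_dict)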
def _publish(self, obj):
bin_obj = umsgpack.packb(obj)<EOL>self.pub.send(bin_obj)<EOL>
Publish the OC object.
f2783:c0:m6
def start(self):
<EOL>napalm_logs_device_messages_received = Counter(<EOL>'<STR_LIT>',<EOL>"<STR_LIT>",<EOL>['<STR_LIT>']<EOL>)<EOL>napalm_logs_device_raw_published_messages = Counter(<EOL>'<STR_LIT>',<EOL>"<STR_LIT>",<EOL>['<STR_LIT>']<EOL>)<EOL>napalm_logs_device_published_messages = Counter(<EOL>'<STR_LIT>',<EOL>"<STR_LIT>",<EOL>['<STR_LIT>']<EOL>)<EOL>napalm_logs_device_oc_object_failed = Counter(<EOL>'<STR_LIT>',<EOL>"<STR_LIT>",<EOL>['<STR_LIT>']<EOL>)<EOL>self._setup_ipc()<EOL>thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),))<EOL>thread.start()<EOL>signal.signal(signal.SIGTERM, self._exit_gracefully)<EOL>self.__up = True<EOL>while self.__up:<EOL><INDENT>try:<EOL><INDENT>bin_obj = self.sub.recv()<EOL>msg_dict, address = umsgpack.unpackb(bin_obj, use_list=False)<EOL><DEDENT>except zmq.ZMQError as error:<EOL><INDENT>if self.__up is False:<EOL><INDENT>log.info('<STR_LIT>', self._name)<EOL>return<EOL><DEDENT>else:<EOL><INDENT>raise NapalmLogsExit(error)<EOL><DEDENT><DEDENT>log.debug('<STR_LIT>', self._name, msg_dict, address)<EOL>napalm_logs_device_messages_received.labels(device_os=self._name).inc()<EOL>host = msg_dict.get('<STR_LIT:host>')<EOL>prefix_id = msg_dict.pop('<STR_LIT>')<EOL>if '<STR_LIT>' in msg_dict:<EOL><INDENT>timestamp = msg_dict.pop('<STR_LIT>')<EOL><DEDENT>else:<EOL><INDENT>timestamp = self._format_time(msg_dict.get('<STR_LIT:time>', '<STR_LIT>'),<EOL>msg_dict.get('<STR_LIT:date>', '<STR_LIT>'),<EOL>msg_dict.get('<STR_LIT>', '<STR_LIT>'),<EOL>prefix_id)<EOL><DEDENT>facility = msg_dict.get('<STR_LIT>')<EOL>severity = msg_dict.get('<STR_LIT>')<EOL>kwargs = self._parse(msg_dict)<EOL>if not kwargs:<EOL><INDENT>to_publish = {<EOL>'<STR_LIT>': address,<EOL>'<STR_LIT:host>': host,<EOL>'<STR_LIT>': timestamp,<EOL>'<STR_LIT>': msg_dict,<EOL>'<STR_LIT>': self._name,<EOL>'<STR_LIT:error>': '<STR_LIT>',<EOL>'<STR_LIT>': '<STR_LIT>',<EOL>'<STR_LIT>': facility,<EOL>'<STR_LIT>': severity<EOL>}<EOL>log.debug('<STR_LIT>')<EOL>log.debug(to_publish)<EOL>self.pub.send(umsgpack.packb(to_publish))<EOL>napalm_logs_device_raw_published_messages.labels(device_os=self._name).inc()<EOL>continue<EOL><DEDENT>try:<EOL><INDENT>if '<STR_LIT>' in kwargs:<EOL><INDENT>log.debug('<STR_LIT>')<EOL>yang_obj = kwargs['<STR_LIT>'](msg_dict)<EOL><DEDENT>else:<EOL><INDENT>yang_obj = self._emit(**kwargs)<EOL><DEDENT><DEDENT>except Exception:<EOL><INDENT>log.exception('<STR_LIT>', exc_info=True)<EOL>napalm_logs_device_oc_object_failed.labels(device_os=self._name).inc()<EOL>continue<EOL><DEDENT>log.debug('<STR_LIT>')<EOL>log.debug(yang_obj)<EOL>error = kwargs.get('<STR_LIT:error>')<EOL>model_name = kwargs.get('<STR_LIT>')<EOL>to_publish = {<EOL>'<STR_LIT:error>': error,<EOL>'<STR_LIT:host>': host,<EOL>'<STR_LIT>': address,<EOL>'<STR_LIT>': timestamp,<EOL>'<STR_LIT>': yang_obj,<EOL>'<STR_LIT>': msg_dict,<EOL>'<STR_LIT>': model_name,<EOL>'<STR_LIT>': self._name,<EOL>'<STR_LIT>': facility,<EOL>'<STR_LIT>': severity<EOL>}<EOL>log.debug('<STR_LIT>')<EOL>log.debug(to_publish)<EOL>self.pub.send(umsgpack.packb(to_publish))<EOL>napalm_logs_device_published_messages.labels(device_os=self._name).inc()<EOL><DEDENT>
Start the worker process.
f2783:c0:m8
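The worker moves dictionaries over ZeroMQ as MessagePack blobs; a small round trip with an invented payload illustrates the (de)serialization used in the loop above:

    import umsgpack

    to_publish = {'host': 'rtr01', 'error': 'UNKNOWN',
                  'message_details': {'message': 'example'}}
    bin_obj = umsgpack.packb(to_publish)
    assert umsgpack.unpackb(bin_obj) == to_publish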
def stop(self):
log.info('<STR_LIT>', self._name)<EOL>self.__up = False<EOL>self.sub.close()<EOL>self.pub.close()<EOL>self.ctx.term()<EOL>
Stop the worker process.
f2783:c0:m9
def get_transport(name):
try:<EOL><INDENT>log.debug('<STR_LIT>', name)<EOL>return TRANSPORT_LOOKUP[name]<EOL><DEDENT>except KeyError:<EOL><INDENT>msg = '<STR_LIT>'.format(name)<EOL>log.error(msg, exc_info=True)<EOL>raise InvalidTransportException(msg)<EOL><DEDENT>
Return the transport class.
f2788:m0
def _exit_gracefully(signum, _):
global _up<EOL>_up = False<EOL>
Called when a signal is caught; marks the exiting variable as True.
f2792:m0
def _suicide_when_without_parent(self, parent_pid):
while True:<EOL><INDENT>time.sleep(<NUM_LIT:5>)<EOL>try:<EOL><INDENT>os.kill(parent_pid, <NUM_LIT:0>)<EOL><DEDENT>except OSError:<EOL><INDENT>self.stop()<EOL>log.warning('<STR_LIT>')<EOL>os._exit(<NUM_LIT>)<EOL><DEDENT><DEDENT>
Kill this process when the parent has died.
f2794:c0:m0
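The watchdog relies on os.kill(pid, 0), which sends no signal but raises OSError once the target process is gone. A standalone sketch; the poll interval and exit code are illustrative:

    import os
    import time

    def watch_parent(parent_pid, interval=5):
        # Poll the parent every few seconds; signal 0 only checks that the
        # process still exists, so OSError means the parent has died.
        while True:
            time.sleep(interval)
            try:
                os.kill(parent_pid, 0)
            except OSError:
                os._exit(1)

    # Typically started in a daemon thread from the child process:
    # threading.Thread(target=watch_parent, args=(os.getppid(),), daemon=True).start()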