def paginate_dataframe(self, dataframe):
    """
    Return a single page of results, or `None` if pagination is disabled.
    """
    if self.paginator is None:
        return None
    return self.paginator.paginate_dataframe(dataframe, self.request, view=self)
def parse(self):
    """Parse config, return a dict."""
    if exists(self.filepath):
        with open(self.filepath, encoding=charset) as f:
            content = f.read()
    else:
        content = ""

    try:
        config = toml.loads(content)
    except toml.TomlSyntaxError:
        raise ConfigSyntaxError
    return config
def chunks(lst, number):
    """
    A generator: split list `lst` into chunks of size `number`
    (the last chunk may be shorter).

    usage::

        >>> parts = chunks(list(range(8)), 3)
        >>> list(parts)
        [[0, 1, 2], [3, 4, 5], [6, 7]]
    """
    lst_len = len(lst)
    for i in range(0, lst_len, number):
        yield lst[i:i + number]
def update_nested_dict(a, b):
    """
    Update nested dict `a` with another dict `b`.

    usage::

        >>> a = {'x': {'y': 1}}
        >>> b = {'x': {'z': 2, 'y': 3}, 'w': 4}
        >>> update_nested_dict(a, b)
        {'x': {'y': 3, 'z': 2}, 'w': 4}
    """
    for k, v in b.items():
        if isinstance(v, dict):
            d = a.setdefault(k, {})
            update_nested_dict(d, v)
        else:
            a[k] = v
    return a
def render_to(path, template, **data):
    """Shortcut to render `data` with `template` and then write to `path`.
    Wraps `renderer.render_to` with exception handling."""
    try:
        renderer.render_to(path, template, **data)
    except JinjaTemplateNotFound as e:
        logger.error(e.__doc__ + ', Template: %r' % template)
        sys.exit(e.exit_code)
def block_code(self, text, lang):
    """text: unicode text to render"""
    if not lang:
        return self._code_no_lexer(text)
    try:
        lexer = get_lexer_by_name(lang, stripall=True)
    except ClassNotFound:
        # lexer not found, fall back to plain text
        return self._code_no_lexer(text)
    formatter = HtmlFormatter()
    return highlight(text, lexer, formatter)
def parse(self, source):
    """Parse ascii post source, return dict."""
    rt, title, title_pic, markdown = libparser.parse(source)

    if rt == -1:
        raise SeparatorNotFound
    elif rt == -2:
        raise PostTitleNotFound

    # change to unicode
    title, title_pic, markdown = map(to_unicode, (title, title_pic, markdown))

    # render to html
    html = self.markdown.render(markdown)
    summary = self.markdown.render(markdown[:200])

    return {
        'title': title,
        'markdown': markdown,
        'html': html,
        'summary': summary,
        'title_pic': title_pic,
    }
def parse_filename(self, filepath):
    """Parse a post source file's name into a datetime object."""
    name = os.path.basename(filepath)[:-src_ext_len]
    try:
        dt = datetime.strptime(name, "%Y-%m-%d-%H-%M")
    except ValueError:
        raise PostNameInvalid
    return {'name': name, 'datetime': dt, 'filepath': filepath}
def run_server(self, port):
    """Run a server binding to `port`."""
    try:
        self.server = MultiThreadedHTTPServer(('0.0.0.0', port), Handler)
    except socket.error as e:
        # failed to bind port
        logger.error(str(e))
        sys.exit(1)

    logger.info("HTTP serve at http://0.0.0.0:%d (ctrl-c to stop) ..." % port)

    try:
        self.server.serve_forever()
    except KeyboardInterrupt:
        logger.info("^C received, shutting down server")
        self.shutdown_server()
def get_files_stat(self):
    """Get source files' update times."""
    if not exists(Post.src_dir):
        logger.error(SourceDirectoryNotFound.__doc__)
        sys.exit(SourceDirectoryNotFound.exit_code)

    paths = []
    for fn in ls(Post.src_dir):
        if fn.endswith(src_ext):
            paths.append(join(Post.src_dir, fn))

    # config.toml
    if exists(config.filepath):
        paths.append(config.filepath)

    # files: a <filepath: updated time> dict
    files = dict((p, stat(p).st_mtime) for p in paths)
    return files
def watch_files(self):
    """Watch files for changes and rebuild the blog when they change.
    This thread quits when the main process ends."""
    try:
        while 1:
            sleep(1)  # check every 1s

            try:
                files_stat = self.get_files_stat()
            except SystemExit:
                logger.error("Error occurred, server shut down")
                self.shutdown_server()

            if self.files_stat != files_stat:
                logger.info("Changes detected, start rebuilding..")

                try:
                    generator.re_generate()
                    global _root
                    _root = generator.root
                except SystemExit:  # catch sys.exit, it means a fatal error
                    logger.error("Error occurred, server shut down")
                    self.shutdown_server()

                self.files_stat = files_stat  # update files' stat
    except KeyboardInterrupt:
        # This exception won't actually be caught here: every
        # KeyboardInterrupt is caught by the server thread, which
        # terminates this thread at the same time.
        logger.info("^C received, shutting down watcher")
        self.shutdown_watcher()
def parse(src):
    """Note: `src` should be an ascii string."""
    rt = libparser.parse(byref(post), src)
    return (
        rt,
        string_at(post.title, post.tsz),
        string_at(post.tpic, post.tpsz),
        post.body,
    )
def deploy_blog():
    """Deploy new blog to current directory"""
    logger.info(deploy_blog.__doc__)
    # `rsync -aqu path/to/res/* .`
    call('rsync -aqu ' + join(dirname(__file__), 'res', '*') + ' .',
         shell=True)
    logger.success('Done')
    logger.info('Please edit config.toml to meet your needs')
def new_post():
    """Touch a new post in src/"""
    logger.info(new_post.__doc__)

    # build the new post's filename from the current time
    now = datetime.datetime.now()
    now_s = now.strftime('%Y-%m-%d-%H-%M')
    filepath = join(Post.src_dir, now_s + src_ext)

    # check that `src/` exists
    if not exists(Post.src_dir):
        logger.error(SourceDirectoryNotFound.__doc__)
        sys.exit(SourceDirectoryNotFound.exit_code)

    # write sample content to the new post
    content = (
        'Title\n'
        'Title Picture URL\n'
        '---\n'
        'Markdown content ..'
    )
    with open(filepath, 'w') as f:
        f.write(content)
    logger.success('New post created: %s' % filepath)
def clean():
    """Clean htmls rux built: `rm -rf post page index.html`"""
    logger.info(clean.__doc__)
    paths = ['post', 'page', 'index.html']
    call(['rm', '-rf'] + paths)
    logger.success('Done')
def resolve_blocks(template, context):
    '''
    Return a BlockContext instance of all the {% block %} tags in the
    template.

    If `template` is a string, it will be resolved through get_template.
    '''
    try:
        blocks = context.render_context[BLOCK_CONTEXT_KEY]
    except KeyError:
        blocks = context.render_context[BLOCK_CONTEXT_KEY] = BlockContext()

    # If it's just the name, resolve it into a template
    if isinstance(template, six.string_types):
        template = get_template(template)
        # For Django 1.8 compatibility
        template = getattr(template, 'template', template)

    # Add this template's blocks as the first layer
    local_blocks = {
        block.name: block
        for block in template.nodelist.get_nodes_by_type(BlockNode)
    }
    blocks.add_blocks(local_blocks)

    # Do we extend a parent template?
    extends = template.nodelist.get_nodes_by_type(ExtendsNode)
    if extends:
        # Can only have one extends in a template
        extends_node = extends[0]

        # Get the parent, and recurse
        parent_template = extends_node.get_parent(context)
        resolve_blocks(parent_template, context)

    return blocks
def parse_widget_name(widget):
    '''
    Parse an "alias:block_name" string into its separate parts.
    '''
    try:
        alias, block_name = widget.split(':', 1)
    except ValueError:
        raise template.TemplateSyntaxError(
            'widget name must be "alias:block_name" - %s' % widget
        )
    return alias, block_name
def using(context, alias):
    '''
    Temporarily update the context to use the BlockContext for the given
    alias.
    '''
    # An empty alias means look in the current widget set.
    if alias == '':
        yield context
    else:
        try:
            widgets = context.render_context[WIDGET_CONTEXT_KEY]
        except KeyError:
            raise template.TemplateSyntaxError('No widget libraries loaded!')

        try:
            block_set = widgets[alias]
        except KeyError:
            raise template.TemplateSyntaxError(
                'No widget library loaded for alias: %r' % alias
            )

        context.render_context.push()
        context.render_context[BLOCK_CONTEXT_KEY] = block_set
        context.render_context[WIDGET_CONTEXT_KEY] = widgets

        yield context

        context.render_context.pop()
def find_block(context, *names):
    '''
    Find the first matching block in the current block_context.
    '''
    block_set = context.render_context[BLOCK_CONTEXT_KEY]
    for name in names:
        block = block_set.get_block(name)
        if block is not None:
            return block

    raise template.TemplateSyntaxError('No widget found for: %r' % (names,))
def load_widgets(context, **kwargs):
    '''
    Load a series of widget libraries.
    '''
    _soft = kwargs.pop('_soft', False)

    try:
        widgets = context.render_context[WIDGET_CONTEXT_KEY]
    except KeyError:
        widgets = context.render_context[WIDGET_CONTEXT_KEY] = {}

    for alias, template_name in kwargs.items():
        if _soft and alias in widgets:
            continue

        with context.render_context.push({BLOCK_CONTEXT_KEY: BlockContext()}):
            blocks = resolve_blocks(template_name, context)
        widgets[alias] = blocks

    return ''
def auto_widget(field):
    '''Return a list of widget names for the provided field.'''
    # Auto-detect
    info = {
        'widget': field.field.widget.__class__.__name__,
        'field': field.field.__class__.__name__,
        'name': field.name,
    }

    return [
        fmt.format(**info)
        for fmt in (
            '{field}_{widget}_{name}',
            '{field}_{name}',
            '{widget}_{name}',
            '{field}_{widget}',
            '{name}',
            '{widget}',
            '{field}',
        )
    ]
def reuse(context, block_list, **kwargs):
    '''
    Allow reuse of a block within a template.

        {% reuse '_myblock' foo=bar %}

    If passed a list of block names, will use the first that matches:

        {% reuse list_of_block_names .... %}
    '''
    try:
        block_context = context.render_context[BLOCK_CONTEXT_KEY]
    except KeyError:
        block_context = BlockContext()

    if not isinstance(block_list, (list, tuple)):
        block_list = [block_list]

    for block in block_list:
        block = block_context.get_block(block)
        if block:
            break
    else:
        return ''

    with context.push(kwargs):
        return block.render(context)
def display(self):
    """
    When dealing with optgroups, ensure that the value is properly
    force_text'd.
    """
    if not self.is_group():
        return self._display
    return ((force_text(k), v) for k, v in self._display)
def _list_key(self, key):
    """Fetch every message stored under `key` and deserialize it."""
    ret = []
    for msg_json in self.client.lrange(key, 0, -1):
        ret.append(self._fromJSON(msg_json))
    return ret
def create_message(self, level, msg_text, extra_tags='', date=None, url=None):
    """
    Message instances are namedtuples of type `Message`.
    The date field is serialized in datetime.isoformat ECMA-262 format.
    """
    if not date:
        now = timezone.now()
    else:
        now = date
    r = now.isoformat()
    if now.microsecond:
        # keep milliseconds only, per ECMA-262
        r = r[:23] + r[26:]
    if r.endswith('+00:00'):
        r = r[:-6] + 'Z'

    fingerprint = r + msg_text
    msg_id = hashlib.sha256(fingerprint.encode('ascii', 'ignore')).hexdigest()
    return Message(id=msg_id, message=msg_text, level=level, tags=extra_tags,
                   date=r, url=url)
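# A quick, stdlib-only illustration of the ECMA-262 date trimming above.
# The values are made up for the example; only the string slicing is the
# same as in create_message.
from datetime import datetime, timezone

now = datetime(2024, 5, 1, 12, 30, 45, 123456, tzinfo=timezone.utc)
r = now.isoformat()        # '2024-05-01T12:30:45.123456+00:00'
if now.microsecond:
    r = r[:23] + r[26:]    # truncate microseconds to milliseconds
if r.endswith('+00:00'):
    r = r[:-6] + 'Z'       # use the 'Z' suffix for UTC
print(r)                   # '2024-05-01T12:30:45.123Z'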
def add_message_for(users, level, message_text, extra_tags='', date=None,
                    url=None, fail_silently=False):
    """
    Send a message to a list of users without passing through
    `django.contrib.messages`.

    :param users: an iterable containing the recipients of the messages
    :param level: message level
    :param message_text: the string containing the message
    :param extra_tags: like the Django api, a string containing extra tags
        for the message
    :param date: a date, different than the default timezone.now
    :param url: an optional url
    :param fail_silently: not used at the moment
    """
    BackendClass = stored_messages_settings.STORAGE_BACKEND
    backend = BackendClass()
    m = backend.create_message(level, message_text, extra_tags, date, url)
    backend.archive_store(users, m)
    backend.inbox_store(users, m)
def broadcast_message(level, message_text, extra_tags='', date=None, url=None,
                      fail_silently=False):
    """
    Send a message to all users, aka broadcast.

    :param level: message level
    :param message_text: the string containing the message
    :param extra_tags: like the Django api, a string containing extra tags
        for the message
    :param date: a date, different than the default timezone.now
    :param url: an optional url
    :param fail_silently: not used at the moment
    """
    from django.contrib.auth import get_user_model
    users = get_user_model().objects.all()
    add_message_for(users, level, message_text, extra_tags=extra_tags,
                    date=date, url=url, fail_silently=fail_silently)
def mark_read(user, message):
    """
    Mark a message instance as read for a user.

    Returns True if the message was `unread` and thus actually marked as
    `read`, or False in case it is already `read` or does not exist at all.

    :param user: user instance for the recipient
    :param message: a Message instance to mark as read
    """
    BackendClass = stored_messages_settings.STORAGE_BACKEND
    backend = BackendClass()
    return backend.inbox_delete(user, message)
def mark_all_read(user):
    """
    Mark all message instances for a user as read.

    :param user: user instance for the recipient
    """
    BackendClass = stored_messages_settings.STORAGE_BACKEND
    backend = BackendClass()
    backend.inbox_purge(user)
def mark_all_read(request):
    """
    Mark all messages as read (i.e. delete from inbox) for the current
    logged-in user.
    """
    from .settings import stored_messages_settings
    backend = stored_messages_settings.STORAGE_BACKEND()
    backend.inbox_purge(request.user)
    return Response({"message": "All messages read"})
def read(self, request, pk=None):
    """
    Mark the message as read (i.e. delete from inbox).
    """
    from .settings import stored_messages_settings
    backend = stored_messages_settings.STORAGE_BACKEND()
    try:
        backend.inbox_delete(request.user, pk)
    except MessageDoesNotExist as e:
        return Response(e.message, status=404)
    return Response({'status': 'message marked as read'})
def stored_messages_list(context, num_elements=10):
    """
    Render a list of unread stored messages for the current user.
    """
    if "user" in context:
        user = context["user"]
        if user.is_authenticated():
            qs = Inbox.objects.select_related("message").filter(user=user)
            return {
                "messages": qs[:num_elements],
                "count": qs.count(),
            }
def stored_messages_count(context):
    """
    Return the number of unread stored messages for the current user.
    """
    if "user" in context:
        user = context["user"]
        if user.is_authenticated():
            return Inbox.objects.select_related("message").filter(user=user).count()
def stored_messages_archive(context, num_elements=10):
    """
    Render a list of archived messages for the current user.
    """
    if "user" in context:
        user = context["user"]
        if user.is_authenticated():
            qs = MessageArchive.objects.select_related("message").filter(user=user)
            return {
                "messages": qs[:num_elements],
                "count": qs.count(),
            }
def _get(self, *args, **kwargs):
    """
    Retrieve unread messages for the current user, both from the inbox
    and from other storages.
    """
    messages, all_retrieved = super(StorageMixin, self)._get(*args, **kwargs)
    if self.user.is_authenticated():
        inbox_messages = self.backend.inbox_list(self.user)
    else:
        inbox_messages = []

    return messages + inbox_messages, all_retrieved
def add(self, level, message, extra_tags=''):
    """
    If the message level was configured for being stored and request.user
    is not anonymous, save it to the database. Otherwise, let some other
    class handle the message.

    Note: checks such as "the message is not empty" and "the level is
    above the filter" need to be performed here, but they may be performed
    again later if the message does not need to be stored.
    """
    if not message:
        return
    # Check that the message level is not less than the recording level.
    level = int(level)
    if level < self.level:
        return
    # If the message level does not need to be persisted, or the user is
    # anonymous, delegate to the regular storage.
    if level not in stored_messages_settings.STORE_LEVELS or self.user.is_anonymous():
        return super(StorageMixin, self).add(level, message, extra_tags)

    self.added_new = True
    m = self.backend.create_message(level, message, extra_tags)
    self.backend.archive_store([self.user], m)
    self._queued_messages.append(m)
def _store(self, messages, response, *args, **kwargs):
    """
    Persistent messages are already in the database inside the 'archive',
    so they can be considered already "stored". Here we put them in the
    inbox, or remove them from the inbox if the messages were iterated.

    `messages` contains only new messages if self.used == True, otherwise
    it contains both new and unread messages.
    """
    contrib_messages = []
    if self.user.is_authenticated():
        if not messages:
            # erase inbox
            self.backend.inbox_purge(self.user)
        else:
            for m in messages:
                try:
                    self.backend.inbox_store([self.user], m)
                except MessageTypeNotSupported:
                    contrib_messages.append(m)

    super(StorageMixin, self)._store(contrib_messages, response, *args, **kwargs)
def _prepare_messages(self, messages):
    """
    Like the base class method, prepare a list of messages for storage,
    but skip doing so for `models.Message` instances.
    """
    for message in messages:
        if not self.backend.can_handle(message):
            message._prepare()
def jocker(test_options=None):
    """Main entry point for script."""
    version = ver_check()
    options = test_options or docopt(__doc__, version=version)
    _set_global_verbosity_level(options.get('--verbose'))
    jocker_lgr.debug(options)
    jocker_run(options)
def init(base_level=DEFAULT_BASE_LOGGING_LEVEL,
         verbose_level=DEFAULT_VERBOSE_LOGGING_LEVEL,
         logging_config=None):
    """Initialize a base logger.

    You can use this to init a logger in any of your files.
    This will use config.py's LOGGER param and logging.dictConfig
    to configure the logger for you.

    :param int|logging.LEVEL base_level: desired base logging level
    :param int|logging.LEVEL verbose_level: desired verbose logging level
    :param dict logging_config: dictConfig based configuration.
        Used to override the default configuration from config.py.
    :rtype: `python logger`
    """
    logging_config = logging_config or LOGGER
    # TODO: (IMPRV) only perform file related actions if file handler is
    # TODO: (IMPRV) defined.
    log_file = LOGGER['handlers']['file']['filename']
    log_dir = os.path.dirname(os.path.expanduser(log_file))
    if os.path.isfile(log_dir):
        sys.exit('file {0} exists - log directory cannot be created '
                 'there. please remove the file and try again.'
                 .format(log_dir))
    try:
        if log_dir and not os.path.exists(log_dir):
            os.makedirs(log_dir)
        dictconfig.dictConfig(logging_config)
        lgr = logging.getLogger('user')
        lgr.setLevel(base_level)
        return lgr
    except ValueError as e:
        sys.exit('could not initialize logger.'
                 ' verify your logger config'
                 ' and permissions to write to {0} ({1})'
                 .format(log_file, e))
def cfg_convert(self, value):
    """Default converter for the cfg:// protocol."""
    rest = value
    m = self.WORD_PATTERN.match(rest)
    if m is None:
        raise ValueError("Unable to convert %r" % value)
    else:
        rest = rest[m.end():]
        d = self.config[m.groups()[0]]
        # print d, rest
        while rest:
            m = self.DOT_PATTERN.match(rest)
            if m:
                d = d[m.groups()[0]]
            else:
                m = self.INDEX_PATTERN.match(rest)
                if m:
                    idx = m.groups()[0]
                    if not self.DIGIT_PATTERN.match(idx):
                        d = d[idx]
                    else:
                        try:
                            n = int(idx)
                            d = d[n]
                        except TypeError:
                            d = d[idx]
            if m:
                rest = rest[m.end():]
            else:
                raise ValueError('Unable to convert '
                                 '%r at %r' % (value, rest))
    # rest should be empty
    return d
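# For context: cfg_convert resolves a cfg:// style reference one word, dot,
# or [index] segment at a time against the configuration dict. A minimal
# sketch of that resolution with plain dict walking (hypothetical config
# values, no logging machinery involved):
config = {'handlers': {'file': {'args': ['/var/log/app.log', 'a']}}}

d = config['handlers']   # leading word segment
d = d['file']            # .dot segment
d = d['args'][0]         # [index] segment, digits tried as int first
print(d)                 # '/var/log/app.log'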
def configure_custom(self, config):
    """Configure an object with a user-supplied factory."""
    c = config.pop('()')
    if not hasattr(c, '__call__') and \
            hasattr(types, 'ClassType') and isinstance(c, types.ClassType):
        c = self.resolve(c)
    props = config.pop('.', None)
    # Check for valid identifiers
    kwargs = dict((k, config[k]) for k in config if valid_ident(k))
    result = c(**kwargs)
    if props:
        for name, value in props.items():
            setattr(result, name, value)
    return result
def _set_global_verbosity_level(is_verbose_output=False):
    """Set the global verbosity level for the console and the jocker_lgr logger.

    :param bool is_verbose_output: should the output be verbose
    """
    global verbose_output
    # TODO: (IMPRV) only raise exceptions in verbose mode
    verbose_output = is_verbose_output
    if verbose_output:
        jocker_lgr.setLevel(logging.DEBUG)
    else:
        jocker_lgr.setLevel(logging.INFO)
def _import_config(config_file):
    """Return a configuration object.

    :param string config_file: path to config file
    """
    jocker_lgr.debug('config file is: {0}'.format(config_file))
    # load the yaml config
    try:
        jocker_lgr.debug('importing config...')
        with open(config_file, 'r') as c:
            return yaml.safe_load(c.read())
    except IOError as ex:
        jocker_lgr.error(str(ex))
        raise RuntimeError('cannot access config file')
    except yaml.parser.ParserError as ex:
        jocker_lgr.error('invalid yaml file: {0}'.format(ex))
        raise RuntimeError('invalid yaml file')
def execute(varsfile, templatefile, outputfile=None, configfile=None,
            dryrun=False, build=False, push=False, verbose=False):
    """Generate a Dockerfile, build an image and push it to DockerHub.

    A `Dockerfile` will be generated by Jinja2 according to the `varsfile`
    imported. If `build` is true, an image will be generated from the
    `outputfile` (the generated Dockerfile) and committed to the image:tag
    string supplied to `build`. If `push` is true, a build will be
    triggered and the produced image will be pushed to DockerHub upon
    completion.

    :param string varsfile: path to file with variables.
    :param string templatefile: path to template file to use.
    :param string outputfile: path to output Dockerfile.
    :param string configfile: path to yaml file with docker-py config.
    :param bool dryrun: mock run.
    :param build: False or the image:tag to build.
    :param push: False or the image:tag to push (triggers build).
    :param bool verbose: verbose output.
    """
    if dryrun and (build or push):
        jocker_lgr.error('dryrun requested, cannot build.')
        sys.exit(100)
    _set_global_verbosity_level(verbose)
    j = Jocker(varsfile, templatefile, outputfile, configfile, dryrun,
               build, push)
    formatted_text = j.generate()
    if dryrun:
        # dryrun excludes build/push (checked above), so return directly
        return j.dryrun(formatted_text)
    if build or push:
        j.build_image()
    if push:
        j.push_image()
def _parse_dumb_push_output(self, string):
    """Parse the docker push output into a list of JSON strings.

    The push process outputs a single unicode string consisting of
    multiple JSON formatted "status" lines, so we need to parse it so
    that it can be read as multiple strings.

    This receives the string as input, counts curly braces and ignores
    newlines. Whenever the curly-brace stack returns to 0, the entire
    string read up to that point is appended to a list, and so forth.

    :param string: the string to parse
    :rtype: list of JSON strings
    """
    stack = 0
    json_list = []
    tmp_json = ''
    for char in string:
        if char not in ('\r', '\n'):
            tmp_json += char
        if char == '{':
            stack += 1
        elif char == '}':
            stack -= 1
            if stack == 0:
                if len(tmp_json) != 0:
                    json_list.append(tmp_json)
                tmp_json = ''
    return json_list
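# A self-contained illustration of the brace-counting split above, using
# made-up status lines in the shape docker push emits (stdlib only):
import json

raw = '{"status": "Pushing", "progress": {"current": 1}}\n{"status": "Pushed"}'

stack, json_list, tmp = 0, [], ''
for ch in raw:
    if ch not in ('\r', '\n'):
        tmp += ch
    if ch == '{':
        stack += 1
    elif ch == '}':
        stack -= 1
        if stack == 0 and tmp:
            json_list.append(tmp)   # a complete top-level JSON object
            tmp = ''

print([json.loads(j)['status'] for j in json_list])  # ['Pushing', 'Pushed']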
def upload_gif(gif):
    """Upload an image file to Imgur."""
    client_id = os.environ.get('IMGUR_API_ID')
    client_secret = os.environ.get('IMGUR_API_SECRET')

    if client_id is None or client_secret is None:
        click.echo('Cannot upload - could not find IMGUR_API_ID or '
                   'IMGUR_API_SECRET environment variables')
        return

    client = ImgurClient(client_id, client_secret)
    click.echo('Uploading file {}'.format(click.format_filename(gif)))
    response = client.upload_from_path(gif)
    click.echo('File uploaded - see your gif at {}'.format(response['link']))
def is_dot(ip):
    """Return true if the IP address is in dotted decimal notation."""
    octets = str(ip).split('.')
    if len(octets) != 4:
        return False
    for i in octets:
        try:
            val = int(i)
        except ValueError:
            return False
        if val > 255 or val < 0:
            return False
    return True

def is_bin(ip):
    """Return true if the IP address is in binary notation."""
    try:
        ip = str(ip)
        if len(ip) != 32:
            return False
        dec = int(ip, 2)
    except (TypeError, ValueError):
        return False
    if dec > 4294967295 or dec < 0:
        return False
    return True

def is_oct(ip):
    """Return true if the IP address is in octal notation."""
    try:
        dec = int(str(ip), 8)
    except (TypeError, ValueError):
        return False
    if dec > 0o37777777777 or dec < 0:
        return False
    return True

def is_dec(ip):
    """Return true if the IP address is in decimal notation."""
    try:
        dec = int(str(ip))
    except ValueError:
        return False
    if dec > 4294967295 or dec < 0:
        return False
    return True
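# A sense of how these predicates line up: the same address, 192.168.0.1,
# written in each notation, satisfies the corresponding check (values are
# illustrative; note the checks are not mutually exclusive, e.g. a string
# of digits below 8 passes both is_dec and is_oct):
assert is_dot('192.168.0.1')
assert is_bin('11000000101010000000000000000001')   # 32-bit binary string
assert is_oct('030052000001')                        # leading-zero octal
assert is_dec('3232235521')                          # 192*2**24 + 168*2**16 + 1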
def _check_nm(nm, notation):
    """Internally used to check if the given netmask is valid in the
    specified notation: convert to decimal and check against the list
    of valid netmasks."""
    _NM_CHECK_FUNCT = {
        NM_DOT: _dot_to_dec,
        NM_HEX: _hex_to_dec,
        NM_BIN: _bin_to_dec,
        NM_OCT: _oct_to_dec,
        NM_DEC: _dec_to_dec_long,
    }
    try:
        dec = _NM_CHECK_FUNCT[notation](nm, check=True)
    except ValueError:
        return False
    return dec in _NETMASKS_VALUES
def is_bits_nm(nm):
    """Return true if the netmask is in bits notation."""
    try:
        bits = int(str(nm))
    except ValueError:
        return False
    if bits > 32 or bits < 0:
        return False
    return True
def is_wildcard_nm(nm):
    """Return true if the netmask is in wildcard bits notation."""
    try:
        dec = 0xFFFFFFFF - _dot_to_dec(nm, check=True)
    except ValueError:
        return False
    return dec in _NETMASKS_VALUES
def _dot_to_dec(ip, check=True):
    """Dotted decimal notation to decimal conversion."""
    if check and not is_dot(ip):
        raise ValueError('_dot_to_dec: invalid IP: "%s"' % ip)
    octets = str(ip).split('.')
    dec = 0
    dec |= int(octets[0]) << 24
    dec |= int(octets[1]) << 16
    dec |= int(octets[2]) << 8
    dec |= int(octets[3])
    return dec

def _dec_to_dot(ip):
    """Decimal to dotted decimal notation conversion."""
    first = int((ip >> 24) & 255)
    second = int((ip >> 16) & 255)
    third = int((ip >> 8) & 255)
    fourth = int(ip & 255)
    return '%d.%d.%d.%d' % (first, second, third, fourth)
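# The two helpers above are inverses of each other; a quick round trip
# (illustrative values, assuming both helpers are in scope):
dec = _dot_to_dec('10.0.0.254')   # (10 << 24) | 254 == 167772414
assert dec == 167772414
assert _dec_to_dot(dec) == '10.0.0.254'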
def _hex_to_dec(ip, check=True):
    """Hexadecimal to decimal conversion."""
    if check and not is_hex(ip):
        raise ValueError('_hex_to_dec: invalid IP: "%s"' % ip)
    if isinstance(ip, int):
        ip = hex(ip)
    return int(str(ip), 16)

def _oct_to_dec(ip, check=True):
    """Octal to decimal conversion."""
    if check and not is_oct(ip):
        raise ValueError('_oct_to_dec: invalid IP: "%s"' % ip)
    if isinstance(ip, int):
        ip = oct(ip)
    return int(str(ip), 8)

def _bin_to_dec(ip, check=True):
    """Binary to decimal conversion."""
    if check and not is_bin(ip):
        raise ValueError('_bin_to_dec: invalid IP: "%s"' % ip)
    if isinstance(ip, int):
        ip = str(ip)
    return int(str(ip), 2)
def _BYTES_TO_BITS():
    """Generate a table to convert a whole byte to binary.
    This code was taken from the Python Cookbook, 2nd edition - O'Reilly."""
    the_table = 256 * [None]
    bits_per_byte = list(range(7, -1, -1))
    for n in range(256):
        l = n
        bits = 8 * [None]
        for i in bits_per_byte:
            bits[i] = '01'[n & 1]
            n >>= 1
        the_table[l] = ''.join(bits)
    return the_table
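# Note that _dec_to_bin below indexes _BYTES_TO_BITS like a list, so the
# module presumably rebinds the name to the generated table once, e.g.
# _BYTES_TO_BITS = _BYTES_TO_BITS(). A few spot checks of the table:
table = _BYTES_TO_BITS()
assert table[0] == '00000000'
assert table[5] == '00000101'
assert table[255] == '11111111'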
def _dec_to_bin(ip):
    """Decimal to binary conversion."""
    bits = []
    while ip:
        bits.append(_BYTES_TO_BITS[ip & 255])
        ip >>= 8
    bits.reverse()
    return ''.join(bits) or 32 * '0'
def _dec_to_dec_long(ip, check=True):
    """Decimal to decimal (long) conversion."""
    if check and not is_dec(ip):
        raise ValueError('_dec_to_dec_long: invalid IP: "%s"' % ip)
    return int(str(ip))
def _bits_to_dec(nm, check=True):
    """Bits to decimal conversion."""
    if check and not is_bits_nm(nm):
        raise ValueError('_bits_to_dec: invalid netmask: "%s"' % nm)
    bits = int(str(nm))
    return VALID_NETMASKS[bits]

def _wildcard_to_dec(nm, check=False):
    """Wildcard bits to decimal conversion."""
    if check and not is_wildcard_nm(nm):
        raise ValueError('_wildcard_to_dec: invalid netmask: "%s"' % nm)
    return 0xFFFFFFFF - _dot_to_dec(nm, check=False)
def _is_notation(ip, notation, _isnm):
    """Internally used to check if an IP/netmask is in the given notation."""
    notation_orig = notation
    notation = _get_notation(notation)
    if notation not in _CHECK_FUNCT_KEYS:
        raise ValueError('_is_notation: unknown notation: "%s"' % notation_orig)
    return _CHECK_FUNCT[notation][_isnm](ip)
def _detect(ip, _isnm):
    """Function internally used to detect the notation of the
    given IP or netmask."""
    ip = str(ip)
    if len(ip) > 1:
        if ip[0:2] == '0x':
            if _CHECK_FUNCT[IP_HEX][_isnm](ip):
                return IP_HEX
        elif ip[0] == '0':
            if _CHECK_FUNCT[IP_OCT][_isnm](ip):
                return IP_OCT
    if _CHECK_FUNCT[IP_DOT][_isnm](ip):
        return IP_DOT
    elif _isnm and _CHECK_FUNCT[NM_BITS][_isnm](ip):
        return NM_BITS
    elif _CHECK_FUNCT[IP_DEC][_isnm](ip):
        return IP_DEC
    elif _isnm and _CHECK_FUNCT[NM_WILDCARD][_isnm](ip):
        return NM_WILDCARD
    elif _CHECK_FUNCT[IP_BIN][_isnm](ip):
        return IP_BIN
    return IP_UNKNOWN
def _convert(ip, notation, inotation, _check, _isnm):
    """Internally used to convert IPs and netmasks to other notations."""
    inotation_orig = inotation
    notation_orig = notation
    inotation = _get_notation(inotation)
    notation = _get_notation(notation)
    if inotation is None:
        raise ValueError('_convert: unknown input notation: "%s"'
                         % inotation_orig)
    if notation is None:
        raise ValueError('_convert: unknown output notation: "%s"'
                         % notation_orig)
    docheck = _check or False
    if inotation == IP_UNKNOWN:
        inotation = _detect(ip, _isnm)
        if inotation == IP_UNKNOWN:
            raise ValueError('_convert: unable to guess input notation '
                             'or invalid value')
        if _check is None:
            docheck = True
    # We _always_ check this case later.
    if _isnm:
        docheck = False
    dec = 0
    if inotation == IP_DOT:
        dec = _dot_to_dec(ip, docheck)
    elif inotation == IP_HEX:
        dec = _hex_to_dec(ip, docheck)
    elif inotation == IP_BIN:
        dec = _bin_to_dec(ip, docheck)
    elif inotation == IP_OCT:
        dec = _oct_to_dec(ip, docheck)
    elif inotation == IP_DEC:
        dec = _dec_to_dec_long(ip, docheck)
    elif _isnm and inotation == NM_BITS:
        dec = _bits_to_dec(ip, docheck)
    elif _isnm and inotation == NM_WILDCARD:
        dec = _wildcard_to_dec(ip, docheck)
    else:
        raise ValueError('_convert: unknown IP/netmask notation: "%s"'
                         % inotation_orig)
    # Ensure this is a valid netmask.
    if _isnm and dec not in _NETMASKS_VALUES:
        raise ValueError('_convert: invalid netmask: "%s"' % ip)
    if notation == IP_DOT:
        return _dec_to_dot(dec)
    elif notation == IP_HEX:
        return _dec_to_hex(dec)
    elif notation == IP_BIN:
        return _dec_to_bin(dec)
    elif notation == IP_OCT:
        return _dec_to_oct(dec)
    elif notation == IP_DEC:
        return _dec_to_dec_str(dec)
    elif _isnm and notation == NM_BITS:
        return _dec_to_bits(dec)
    elif _isnm and notation == NM_WILDCARD:
        return _dec_to_wildcard(dec)
    else:
        raise ValueError('_convert: unknown notation: "%s"' % notation_orig)
def convert(ip, notation=IP_DOT, inotation=IP_UNKNOWN, check=True):
    """Convert among IP address notations.

    Given an IP address, this function returns the address
    in another notation.

    @param ip: the IP address.
    @type ip: integers, strings or object with an appropriate __str()__
        method.

    @param notation: the notation of the output (default: IP_DOT).
    @type notation: one of the IP_* constants, or the equivalent strings.

    @param inotation: force the input to be considered in the given
        notation (default: the notation of the input is autodetected).
    @type inotation: one of the IP_* constants, or the equivalent strings.

    @param check: force the notation check on the input.
    @type check: True forces the check, False forces no check and None
        does the check only if the inotation is unknown.

    @return: a string representing the IP in the selected notation.

    @raise ValueError: raised when the input is in unknown notation."""
    return _convert(ip, notation, inotation, _check=check, _isnm=False)
def convert_nm(nm, notation=IP_DOT, inotation=IP_UNKNOWN, check=True):
    """Convert a netmask to another notation."""
    return _convert(nm, notation, inotation, _check=check, _isnm=True)
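# A few illustrative calls, assuming the IP_*/NM_* constants defined in this
# module and string return values, as the converter names above suggest:
print(convert('0xc0a80001'))                          # 192.168.0.1
print(convert('192.168.0.1', notation=IP_DEC))        # 3232235521
print(convert_nm('255.255.255.0', notation=NM_BITS))  # 24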
def set(self, ip, notation=IP_UNKNOWN):
    """Set the IP address/netmask."""
    self._ip_dec = int(_convert(ip, notation=IP_DEC, inotation=notation,
                                _check=True, _isnm=self._isnm))
    self._ip = _convert(self._ip_dec, notation=IP_DOT, inotation=IP_DEC,
                        _check=False, _isnm=self._isnm)

def get_hex(self):
    """Return the hexadecimal notation of the address/netmask."""
    return _convert(self._ip_dec, notation=IP_HEX, inotation=IP_DEC,
                    _check=False, _isnm=self._isnm)

def get_bin(self):
    """Return the binary notation of the address/netmask."""
    return _convert(self._ip_dec, notation=IP_BIN, inotation=IP_DEC,
                    _check=False, _isnm=self._isnm)

def get_oct(self):
    """Return the octal notation of the address/netmask."""
    return _convert(self._ip_dec, notation=IP_OCT, inotation=IP_DEC,
                    _check=False, _isnm=self._isnm)
def _cmp_prepare(self, other):
    """Prepare the item to be compared with this address/netmask."""
    if isinstance(other, self.__class__):
        return other._ip_dec
    elif isinstance(other, int):
        # NOTE: this hides the fact that "other" can be a non-valid IP/nm.
        return other
    return self.__class__(other)._ip_dec
def _add(self, other):
    """Sum two IP addresses."""
    if isinstance(other, self.__class__):
        sum_ = self._ip_dec + other._ip_dec
    elif isinstance(other, int):
        sum_ = self._ip_dec + other
    else:
        other = self.__class__(other)
        sum_ = self._ip_dec + other._ip_dec
    return sum_
def _sub(self, other):
    """Subtract two IP addresses."""
    if isinstance(other, self.__class__):
        sub = self._ip_dec - other._ip_dec
    elif isinstance(other, int):
        sub = self._ip_dec - other
    else:
        other = self.__class__(other)
        sub = self._ip_dec - other._ip_dec
    return sub
def get_bits(self):
    """Return the bits notation of the netmask."""
    return _convert(self._ip, notation=NM_BITS, inotation=IP_DOT,
                    _check=False, _isnm=self._isnm)

def get_wildcard(self):
    """Return the wildcard bits notation of the netmask."""
    return _convert(self._ip, notation=NM_WILDCARD, inotation=IP_DOT,
                    _check=False, _isnm=self._isnm)
def set(self, ip, netmask=None):
    """Set the IP address and the netmask."""
    if isinstance(ip, str) and netmask is None:
        ipnm = ip.split('/')
        if len(ipnm) != 2:
            raise ValueError('set: invalid CIDR: "%s"' % ip)
        ip = ipnm[0]
        netmask = ipnm[1]
    if isinstance(ip, IPv4Address):
        self._ip = ip
    else:
        self._ip = IPv4Address(ip)
    if isinstance(netmask, IPv4NetMask):
        self._nm = netmask
    else:
        self._nm = IPv4NetMask(netmask)

    ipl = int(self._ip)
    nml = int(self._nm)
    base_add = ipl & nml
    self._ip_num = 0xFFFFFFFF - 1 - nml
    # NOTE: quite a mess.
    # This is here to handle /32 (-1) and /31 (0) netmasks.
    if self._ip_num in (-1, 0):
        if self._ip_num == -1:
            self._ip_num = 1
        else:
            self._ip_num = 2
        self._net_ip = None
        self._bc_ip = None
        self._first_ip_dec = base_add
        self._first_ip = IPv4Address(self._first_ip_dec, notation=IP_DEC)
        if self._ip_num == 1:
            last_ip_dec = self._first_ip_dec
        else:
            last_ip_dec = self._first_ip_dec + 1
        self._last_ip = IPv4Address(last_ip_dec, notation=IP_DEC)
        return

    self._net_ip = IPv4Address(base_add, notation=IP_DEC)
    self._bc_ip = IPv4Address(base_add + self._ip_num + 1, notation=IP_DEC)
    self._first_ip_dec = base_add + 1
    self._first_ip = IPv4Address(self._first_ip_dec, notation=IP_DEC)
    self._last_ip = IPv4Address(base_add + self._ip_num, notation=IP_DEC)
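# To make the special-casing above concrete (illustrative, assuming the
# CIDR class wires set() into its constructor):
c = CIDR('192.168.0.0/24')
# 0xFFFFFFFF - 1 - nml == 254 usable hosts; .0 is the network address,
# .255 the broadcast; first usable ip 192.168.0.1, last 192.168.0.254

p = CIDR('10.0.0.0/31')
# a /31 has no separate network/broadcast addresses: both addresses are
# usable, so _ip_num is forced to 2 and _net_ip/_bc_ip stay None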
def set_ip(self, ip):
    """Change the current IP."""
    self.set(ip=ip, netmask=self._nm)

def set_netmask(self, netmask):
    """Change the current netmask."""
    self.set(ip=self._ip, netmask=netmask)
def is_valid_ip(self, ip):
    """Return true if the given address is amongst the usable addresses,
    or if the given CIDR is contained in this one."""
    if not isinstance(ip, (IPv4Address, CIDR)):
        if str(ip).find('/') == -1:
            ip = IPv4Address(ip)
        else:
            # Support for CIDR strings/objects, an idea of Nicola Novello.
            ip = CIDR(ip)
    if isinstance(ip, IPv4Address):
        if ip < self._first_ip or ip > self._last_ip:
            return False
    elif isinstance(ip, CIDR):
        # NOTE: manage /31 networks; 127.0.0.1/31 is considered to
        # be included in 127.0.0.1/8.
        if ip._nm._ip_dec == 0xFFFFFFFE \
                and self._nm._ip_dec != 0xFFFFFFFE:
            compare_to_first = self._net_ip._ip_dec
            compare_to_last = self._bc_ip._ip_dec
        else:
            compare_to_first = self._first_ip._ip_dec
            compare_to_last = self._last_ip._ip_dec
        if ip._first_ip._ip_dec < compare_to_first or \
                ip._last_ip._ip_dec > compare_to_last:
            return False
    return True
async def upload_file(self, bucket, file, uploadpath=None, key=None,
                      ContentType=None, **kw):
    """Upload a file to S3, possibly using the multi-part uploader.

    Return the key uploaded.
    """
    is_filename = False

    if hasattr(file, 'read'):
        if hasattr(file, 'seek'):
            file.seek(0)
        file = file.read()
        size = len(file)
    elif key:
        size = len(file)
    else:
        is_filename = True
        size = os.stat(file).st_size
        key = os.path.basename(file)

    assert key, 'key not available'

    if not ContentType:
        ContentType, _ = mimetypes.guess_type(key)

    if uploadpath:
        if not uploadpath.endswith('/'):
            uploadpath = '%s/' % uploadpath
        key = '%s%s' % (uploadpath, key)

    params = dict(Bucket=bucket, Key=key)
    if not ContentType:
        ContentType = 'application/octet-stream'
    params['ContentType'] = ContentType

    if size > MULTI_PART_SIZE and is_filename:
        resp = await _multipart(self, file, params)
    elif is_filename:
        with open(file, 'rb') as fp:
            params['Body'] = fp.read()
        resp = await self.put_object(**params)
    else:
        params['Body'] = file
        resp = await self.put_object(**params)

    if 'Key' not in resp:
        resp['Key'] = key
    if 'Bucket' not in resp:
        resp['Bucket'] = bucket
    return resp
async def copy_storage_object(self, source_bucket, source_key, bucket, key):
    """Copy a file from one bucket into another."""
    info = await self.head_object(Bucket=source_bucket, Key=source_key)
    size = info['ContentLength']

    if size > MULTI_PART_SIZE:
        result = await _multipart_copy(self, source_bucket, source_key,
                                       bucket, key, size)
    else:
        result = await self.copy_object(
            Bucket=bucket, Key=key,
            CopySource=_source_string(source_bucket, source_key)
        )
    return result
def upload_folder(self, bucket, folder, key=None, skip=None,
                  content_types=None):
    """Recursively upload a ``folder`` into a bucket.

    :param bucket: bucket where to upload the folder to
    :param folder: the folder location in the local file system
    :param key: Optional key where the folder is uploaded
    :param skip: Optional list of files to skip
    :param content_types: Optional dictionary mapping suffixes to
        content types
    :return: a coroutine
    """
    uploader = FolderUploader(self, bucket, folder, key, skip, content_types)
    return uploader.start()
async def _upload_file(self, full_path):
    """Coroutine for uploading a single file."""
    rel_path = os.path.relpath(full_path, self.folder)
    key = s3_key(os.path.join(self.key, rel_path))
    ct = self.content_types.get(key.split('.')[-1])
    with open(full_path, 'rb') as fp:
        file = fp.read()
    try:
        await self.botocore.upload_file(self.bucket, file, key=key,
                                        ContentType=ct)
    except Exception as exc:
        LOGGER.error('Could not upload "%s": %s', key, exc)
        self.failures[key] = self.all.pop(full_path)
        return

    size = self.all.pop(full_path)
    self.success[key] = size
    self.total_size += size
    percentage = 100 * (1 - len(self.all) / self.total_files)
    message = '{0:.0f}% completed - uploaded "{1}" - {2}'.format(
        percentage, key, convert_bytes(size))
    LOGGER.info(message)
def get_paginator(self, operation_name):
    """Create a paginator for an operation.

    :type operation_name: string
    :param operation_name: The operation name. This is the same name
        as the method name on the client. For example, if the
        method name is ``create_foo``, and you'd normally invoke the
        operation as ``client.create_foo(**kwargs)``, if the
        ``create_foo`` operation can be paginated, you can use the
        call ``client.get_paginator("create_foo")``.

    :raise OperationNotPageableError: Raised if the operation is not
        pageable. You can use the ``client.can_paginate`` method to
        check if an operation is pageable.

    :rtype: L{botocore.paginate.Paginator}
    :return: A paginator object.
    """
    if not self.can_paginate(operation_name):
        raise OperationNotPageableError(operation_name=operation_name)
    else:
        actual_operation_name = self._PY_TO_OP_NAME[operation_name]
        # substitute the iterator with an async one
        Paginator.PAGE_ITERATOR_CLS = AsyncPageIterator
        paginator = Paginator(
            getattr(self, operation_name),
            self._cache['page_config'][actual_operation_name])
        return paginator
async def trigger(self, event, data=None, socket_id=None):
    '''Trigger an ``event`` on this channel.'''
    json_data = json.dumps(data, cls=self.pusher.encoder)
    query_string = self.signed_query(event, json_data, socket_id)
    signed_path = "%s?%s" % (self.path, query_string)
    pusher = self.pusher
    absolute_url = pusher.get_absolute_path(signed_path)
    response = await pusher.http.post(
        absolute_url, data=json_data,
        headers=[('Content-Type', 'application/json')])
    response.raise_for_status()
    return response.status_code == 202
async def connect(self):
    '''Connect to a Pusher websocket.'''
    if not self._consumer:
        waiter = self._waiter = asyncio.Future()
        try:
            address = self._websocket_host()
            self.logger.info('Connect to %s', address)
            self._consumer = await self.http.get(address)
            if self._consumer.status_code != 101:
                raise PusherError("Could not connect to websocket")
        except Exception as exc:
            waiter.set_exception(exc)
            raise
        else:
            await waiter
    return self._consumer
def on_message(self, websocket, message):
    '''Handle websocket incoming messages.'''
    waiter = self._waiter
    self._waiter = None
    encoded = json.loads(message)
    event = encoded.get('event')
    channel = encoded.get('channel')
    data = json.loads(encoded.get('data'))
    try:
        if event == PUSHER_ERROR:
            raise PusherError(data['message'], data['code'])
        elif event == PUSHER_CONNECTION:
            self.socket_id = data.get('socket_id')
            self.logger.info('Successfully connected on socket %s',
                             self.socket_id)
            waiter.set_result(self.socket_id)
        elif event == PUSHER_SUBSCRIBED:
            self.logger.info('Successfully subscribed to %s',
                             encoded.get('channel'))
        elif channel:
            self[channel]._event(event, data)
    except Exception as exc:
        if waiter:
            waiter.set_exception(exc)
        else:
            self.logger.exception('pusher error')
def urlsafe_nopadding_b64decode(data):
    '''URL safe Base64 decode without padding (=).'''
    padding = len(data) % 4
    if padding != 0:
        padding = 4 - padding
        padding = '=' * padding
        data = data + padding
    return urlsafe_b64decode(data)
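# A stdlib-only round trip showing why the re-padding works. The encode side
# here is an assumption: the module presumably has a matching
# urlsafe_nopadding_b64encode that strips the '=' padding.
from base64 import urlsafe_b64encode, urlsafe_b64decode

raw = b'any carnal pleas'
encoded = urlsafe_b64encode(raw).rstrip(b'=')   # no-padding encode
# re-pad to a multiple of 4 before decoding, as the function above does
pad = (-len(encoded)) % 4
assert urlsafe_b64decode(encoded + b'=' * pad) == raw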
def const_equal(str_a, str_b):
    '''Constant time string comparison.'''
    if len(str_a) != len(str_b):
        return False

    result = True
    for i in range(len(str_a)):
        result &= (str_a[i] == str_b[i])
    return result
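# For reference, the standard library offers the same timing-attack-resistant
# guarantee since Python 3.3; this is an alternative, not what the module
# above uses:
import hmac

assert hmac.compare_digest('secret-token', 'secret-token')
assert not hmac.compare_digest('secret-token', 'secret-tokex')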
def decode_html_entities(html):
    """
    Decode a limited set of HTML entities.
    """
    if not html:
        return html

    for entity, char in six.iteritems(html_entity_map):
        html = html.replace(entity, char)

    return html
def set_signature_passphrases(self, signature_passphrases):
    '''Set signature passphrases.'''
    self.signature_passphrases = self._update_dict(signature_passphrases,
                                                   {}, replace_data=True)

def set_encryption_passphrases(self, encryption_passphrases):
    '''Set encryption passphrases.'''
    self.encryption_passphrases = self._update_dict(encryption_passphrases,
                                                    {}, replace_data=True)
def set_algorithms(self, signature=None, encryption=None,
                   serialization=None, compression=None):
    '''Set algorithms used for sealing. Defaults cannot be overridden.'''
    self.signature_algorithms = \
        self._update_dict(signature, self.DEFAULT_SIGNATURE)
    self.encryption_algorithms = \
        self._update_dict(encryption, self.DEFAULT_ENCRYPTION)
    self.serialization_algorithms = \
        self._update_dict(serialization, self.DEFAULT_SERIALIZATION)
    self.compression_algorithms = \
        self._update_dict(compression, self.DEFAULT_COMPRESSION)
def get_algorithms(self):
    '''Get algorithms used for sealing.'''
    return {
        'signature': self.signature_algorithms,
        'encryption': self.encryption_algorithms,
        'serialization': self.serialization_algorithms,
        'compression': self.compression_algorithms,
    }
def _set_options(self, options):
    '''Private function for setting options used for sealing.'''
    if not options:
        return self.options.copy()

    options = options.copy()

    if 'magic' in options:
        self.set_magic(options['magic'])
        del options['magic']

    if 'flags' in options:
        flags = options['flags']
        del options['flags']
        for key, value in flags.items():
            if not isinstance(value, bool):
                raise TypeError('Invalid flag type for: %s' % key)
    else:
        flags = self.options['flags']

    if 'info' in options:
        del options['info']

    for key, value in options.items():
        if not isinstance(value, int):
            raise TypeError('Invalid option type for: %s' % key)
        if value < 0 or value > 255:
            raise ValueError('Option value out of range for: %s' % key)

    new_options = self.options.copy()
    new_options.update(options)
    new_options['flags'].update(flags)
    return new_options
def set_magic(self, magic):
    '''Set magic (prefix).'''
    if magic is None or isinstance(magic, str):
        self.magic = magic
    else:
        raise TypeError('Invalid value for magic')
def seal(self, data, options=None):
    '''Seal data.'''
    options = self._set_options(options)

    data = self._serialize_data(data, options)
    data = self._compress_data(data, options)
    data = self._encrypt_data(data, options)
    data = self._add_header(data, options)
    data = self._add_magic(data)
    data = self._sign_data(data, options)
    data = self._remove_magic(data)
    data = urlsafe_nopadding_b64encode(data)
    data = self._add_magic(data)
    return data
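# The final encoding step is what makes the sealed token transport-safe.
# A minimal sketch of the presumable encode counterpart to the no-padding
# decoder shown earlier (the real module defines its own version):
from base64 import urlsafe_b64encode

def urlsafe_nopadding_b64encode_sketch(data):
    # strip '=' padding so the token is safe in URLs and cookies
    return urlsafe_b64encode(data).rstrip(b'=')

blob = b'\x00\x01sealed-bytes\xff'       # stand-in for the signed payload
token = urlsafe_nopadding_b64encode_sketch(blob)
assert b'=' not in token
assert b'+' not in token and b'/' not in token   # URL-safe alphabet only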