def owsproxy_delegate(request):
    """
    Delegates owsproxy request to external twitcher service.
    """
    twitcher_url = request.registry.settings.get('twitcher.url')
    protected_path = request.registry.settings.get('twitcher.ows_proxy_protected_path', '/ows')
    url = twitcher_url + protected_path + '/proxy'
    if request.matchdict.get('service_name'):
        url += '/' + request.matchdict.get('service_name')
    if request.matchdict.get('access_token'):
        url += '/' + request.matchdict.get('access_token')
    url += '?' + urlparse.urlencode(request.params)
    LOGGER.debug("delegate to owsproxy: %s", url)
    # forward request to target (without Host header)
    # h = dict(request.headers)
    # h.pop("Host", None)
    resp = requests.request(method=request.method.upper(), url=url, data=request.body,
                            headers=request.headers, verify=False)
    return Response(resp.content, status=resp.status_code, headers=resp.headers)
def ows_security_tween_factory(handler, registry):
    """A tween factory which produces a tween which raises an exception if access to OWS service is not allowed."""
    security = owssecurity_factory(registry)

    def ows_security_tween(request):
        try:
            security.check_request(request)
            return handler(request)
        except OWSException as err:
            logger.exception("security check failed.")
            return err
        except Exception as err:
            logger.exception("unknown error")
            return OWSNoApplicableCode("{}".format(err))

    return ows_security_tween

def main(global_config, **settings):
    """
    This function returns a Pyramid WSGI application.
    """
    from pyramid.config import Configurator
    config = Configurator(settings=settings)

    # include twitcher components
    config.include('twitcher.config')
    config.include('twitcher.frontpage')
    config.include('twitcher.rpcinterface')
    config.include('twitcher.owsproxy')

    # tweens/middleware
    # TODO: maybe add tween for exception handling or use unknown_failure view
    config.include('twitcher.tweens')

    config.scan()
    return config.make_wsgi_app()
def generate_token(self, valid_in_hours=1, data=None):
    """
    Implementation of :meth:`twitcher.api.ITokenManager.generate_token`.
    """
    data = data or {}
    access_token = self.tokengenerator.create_access_token(
        valid_in_hours=valid_in_hours,
        data=data,
    )
    self.store.save_token(access_token)
    return access_token.params

def revoke_token(self, token):
    """
    Implementation of :meth:`twitcher.api.ITokenManager.revoke_token`.
    """
    try:
        self.store.delete_token(token)
    except Exception:
        LOGGER.exception('Failed to remove token.')
        return False
    else:
        return True

def revoke_all_tokens(self):
    """
    Implementation of :meth:`twitcher.api.ITokenManager.revoke_all_tokens`.
    """
    try:
        self.store.clear_tokens()
    except Exception:
        LOGGER.exception('Failed to remove tokens.')
        return False
    else:
        return True

def register_service(self, url, data=None, overwrite=True):
    """
    Implementation of :meth:`twitcher.api.IRegistry.register_service`.
    """
    data = data or {}
    args = dict(data)
    args['url'] = url
    service = Service(**args)
    service = self.store.save_service(service, overwrite=overwrite)
    return service.params

def unregister_service(self, name):
    """
    Implementation of :meth:`twitcher.api.IRegistry.unregister_service`.
    """
    try:
        self.store.delete_service(name=name)
    except Exception:
        LOGGER.exception('unregister failed')
        return False
    else:
        return True

def get_service_by_name(self, name):
    """
    Implementation of :meth:`twitcher.api.IRegistry.get_service_by_name`.
    """
    try:
        service = self.store.fetch_by_name(name=name)
    except Exception:
        LOGGER.error('Could not get service with name %s', name)
        return {}
    else:
        return service.params

def get_service_by_url(self, url):
    """
    Implementation of :meth:`twitcher.api.IRegistry.get_service_by_url`.
    """
    try:
        service = self.store.fetch_by_url(url=url)
    except Exception:
        LOGGER.error('Could not get service with url %s', url)
        return {}
    else:
        return service.params

def list_services(self):
    """
    Implementation of :meth:`twitcher.api.IRegistry.list_services`.
    """
    try:
        services = [service.params for service in self.store.list_services()]
    except Exception:
        LOGGER.error('List services failed.')
        return []
    else:
        return services

def clear_services(self):
    """
    Implementation of :meth:`twitcher.api.IRegistry.clear_services`.
    """
    try:
        self.store.clear_services()
    except Exception:
        LOGGER.error('Clear services failed.')
        return False
    else:
        return True
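Taken together, these manager/registry methods share one contract: mutating calls return True or False, lookups fall back to empty defaults, and failures are logged instead of raised. A minimal self-contained sketch of that contract (the class and store here are illustrative stand-ins, not the project's API):

import logging

LOGGER = logging.getLogger(__name__)

class MemoryTokenManager:
    """Illustrative stand-in following the same log-and-return-False contract."""

    def __init__(self):
        self._tokens = {}

    def save_token(self, token, params):
        self._tokens[token] = params

    def revoke_token(self, token):
        try:
            del self._tokens[token]
        except KeyError:
            LOGGER.exception('Failed to remove token.')
            return False
        else:
            return True

manager = MemoryTokenManager()
manager.save_token('abc123', {'expires': 3600})
assert manager.revoke_token('abc123') is True
assert manager.revoke_token('abc123') is False  # already gone: logged, swallowed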
def includeme(config):
    """
    The callable makes it possible to include rpcinterface in a Pyramid application.

    Calling ``config.include(twitcher.rpcinterface)`` will result in this callable
    being called.

    Arguments:

    * ``config``: the ``pyramid.config.Configurator`` object.
    """
    settings = config.registry.settings

    if asbool(settings.get('twitcher.rpcinterface', True)):
        LOGGER.debug('Twitcher XML-RPC Interface enabled.')

        # include twitcher config
        config.include('twitcher.config')

        # using basic auth
        config.include('twitcher.basicauth')

        # pyramid xml-rpc
        # http://docs.pylonsproject.org/projects/pyramid-rpc/en/latest/xmlrpc.html
        config.include('pyramid_rpc.xmlrpc')
        config.include('twitcher.db')
        config.add_xmlrpc_endpoint('api', '/RPC2')

        # register xmlrpc methods
        config.add_xmlrpc_method(RPCInterface, attr='generate_token', endpoint='api', method='generate_token')
        config.add_xmlrpc_method(RPCInterface, attr='revoke_token', endpoint='api', method='revoke_token')
        config.add_xmlrpc_method(RPCInterface, attr='revoke_all_tokens', endpoint='api', method='revoke_all_tokens')
        config.add_xmlrpc_method(RPCInterface, attr='register_service', endpoint='api', method='register_service')
        config.add_xmlrpc_method(RPCInterface, attr='unregister_service', endpoint='api', method='unregister_service')
        config.add_xmlrpc_method(RPCInterface, attr='get_service_by_name', endpoint='api', method='get_service_by_name')
        config.add_xmlrpc_method(RPCInterface, attr='get_service_by_url', endpoint='api', method='get_service_by_url')
        config.add_xmlrpc_method(RPCInterface, attr='clear_services', endpoint='api', method='clear_services')
        config.add_xmlrpc_method(RPCInterface, attr='list_services', endpoint='api', method='list_services')
def save_service(self, service, overwrite=True):
    """
    Store an OWS service in database.
    """
    name = namesgenerator.get_sane_name(service.name)
    if not name:
        name = namesgenerator.get_random_name()
        if name in self.name_index:
            name = namesgenerator.get_random_name(retry=True)
    # check if service is already registered
    if name in self.name_index:
        if overwrite:
            self._delete(name=name)
        else:
            raise Exception("service name already registered.")
    self._insert(Service(
        name=name,
        url=baseurl(service.url),
        type=service.type,
        purl=service.purl,
        public=service.public,
        auth=service.auth,
        verify=service.verify))
    return self.fetch_by_name(name=name)

def list_services(self):
    """
    Lists all services in memory storage.
    """
    my_services = []
    for service in self.name_index.values():
        my_services.append(Service(service))
    return my_services

def fetch_by_name(self, name):
    """
    Get service for given ``name`` from memory storage.
    """
    service = self.name_index.get(name)
    if not service:
        raise ServiceNotFound
    return Service(service)
def _retrieve_certificate(self, access_token, timeout=3):
    """
    Generates a new private key and certificate request, submits the request to be
    signed by the SLCS CA and returns the certificate.
    """
    logger.debug("Retrieve certificate with token.")
    # Generate a new key pair
    key_pair = crypto.PKey()
    key_pair.generate_key(crypto.TYPE_RSA, 2048)
    private_key = crypto.dump_privatekey(crypto.FILETYPE_PEM, key_pair).decode("utf-8")
    # Generate a certificate request using that key pair
    cert_request = crypto.X509Req()
    # Add the public key to the request and sign it
    cert_request.set_pubkey(key_pair)
    cert_request.sign(key_pair, 'md5')
    der_cert_req = crypto.dump_certificate_request(crypto.FILETYPE_ASN1, cert_request)
    encoded_cert_req = base64.b64encode(der_cert_req)
    # Build the OAuth session object
    token = {'access_token': access_token, 'token_type': 'Bearer'}
    client = OAuth2Session(token=token)
    response = client.post(
        self.certificate_url,
        data={'certificate_request': encoded_cert_req},
        verify=False,
        timeout=timeout,
    )
    if response.ok:
        content = "{} {}".format(response.text, private_key)
        with open(self.esgf_credentials, 'w') as fh:
            fh.write(content)
        logger.debug('Fetched certificate successfully.')
    else:
        msg = "Could not get certificate: {} {}".format(response.status_code, response.reason)
        raise Exception(msg)
    return True
def tokenstore_factory(registry, database=None):
    """
    Creates a token store with the interface of :class:`twitcher.store.AccessTokenStore`.
    By default the mongodb implementation will be used.

    :param database: A string with the store implementation name: "mongodb" or "memory".
    :return: An instance of :class:`twitcher.store.AccessTokenStore`.
    """
    database = database or 'mongodb'
    if database == 'mongodb':
        db = _mongodb(registry)
        store = MongodbTokenStore(db.tokens)
    else:
        store = MemoryTokenStore()
    return store

def servicestore_factory(registry, database=None):
    """
    Creates a service store with the interface of :class:`twitcher.store.ServiceStore`.
    By default the mongodb implementation will be used.

    :return: An instance of :class:`twitcher.store.ServiceStore`.
    """
    database = database or 'mongodb'
    if database == 'mongodb':
        db = _mongodb(registry)
        store = MongodbServiceStore(collection=db.services)
    else:
        store = MemoryServiceStore()
    return store
def get_random_name(retry=False):
    """
    Generates a random name from the list of adjectives and birds in this package,
    formatted as "adjective_surname". For example: 'loving_sugarbird'.

    If retry is True, a random integer between 0 and 100 will be added
    to the end of the name, e.g. 'loving_sugarbird3'.
    """
    name = "%s_%s" % (left[random.randint(0, len(left) - 1)],
                      right[random.randint(0, len(right) - 1)])
    if retry is True:
        name = "%s%d" % (name, random.randint(0, 100))
    return name
def _get_param(self, param, allowed_values=None, optional=False):
    """Get parameter in GET request."""
    request_params = self._request_params()
    if param in request_params:
        value = request_params[param].lower()
        if allowed_values is not None:
            if value in allowed_values:
                self.params[param] = value
            else:
                raise OWSInvalidParameterValue("%s %s is not supported" % (param, value), value=param)
        else:
            self.params[param] = value
    elif optional:
        self.params[param] = None
    else:
        raise OWSMissingParameterValue('Parameter "%s" is missing' % param, value=param)
    return self.params[param]
def _get_version(self):
    """Find requested version in GET request."""
    version = self._get_param(param="version",
                              allowed_values=allowed_versions[self.params['service']],
                              optional=True)
    if version is None and self._get_request_type() != "getcapabilities":
        raise OWSMissingParameterValue('Parameter "version" is missing', value="version")
    else:
        return version

def _get_service(self):
    """Check mandatory service name parameter in POST request."""
    if "service" in self.document.attrib:
        value = self.document.attrib["service"].lower()
        if value in allowed_service_types:
            self.params["service"] = value
        else:
            raise OWSInvalidParameterValue("Service %s is not supported" % value, value="service")
    else:
        raise OWSMissingParameterValue('Parameter "service" is missing', value="service")
    return self.params["service"]

def _get_request_type(self):
    """Find requested request type in POST request."""
    value = self.document.tag.lower()
    if value in allowed_request_types[self.params['service']]:
        self.params["request"] = value
    else:
        raise OWSInvalidParameterValue("Request type %s is not supported" % value, value="request")
    return self.params["request"]

def _get_version(self):
    """Find requested version in POST request."""
    if "version" in self.document.attrib:
        value = self.document.attrib["version"].lower()
        if value in allowed_versions[self.params['service']]:
            self.params["version"] = value
        else:
            raise OWSInvalidParameterValue("Version %s is not supported" % value, value="version")
    elif self._get_request_type() == "getcapabilities":
        self.params["version"] = None
    else:
        raise OWSMissingParameterValue('Parameter "version" is missing', value="version")
    return self.params["version"]
def localize_datetime(dt, tz_name='UTC'):
    """Provide a timezone-aware object for a given datetime and timezone name."""
    tz_aware_dt = dt
    if dt.tzinfo is None:
        utc = pytz.timezone('UTC')
        aware = utc.localize(dt)
        timezone = pytz.timezone(tz_name)
        tz_aware_dt = aware.astimezone(timezone)
    else:
        logger.warn('tzinfo already set')
    return tz_aware_dt
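For example (assuming pytz is installed), a naive datetime is interpreted as UTC and rendered in the requested zone:

from datetime import datetime

naive = datetime(2024, 1, 1, 12, 0)
print(localize_datetime(naive, 'Europe/Berlin'))
# -> 2024-01-01 13:00:00+01:00 (UTC noon shown as Berlin time)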
def baseurl(url):
    """
    return baseurl of given url
    """
    parsed_url = urlparse.urlparse(url)
    if not parsed_url.netloc or parsed_url.scheme not in ("http", "https"):
        raise ValueError('bad url')
    service_url = "%s://%s%s" % (parsed_url.scheme, parsed_url.netloc, parsed_url.path.strip())
    return service_url
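A quick illustration of what is kept and what is rejected (the URL is made up):

print(baseurl('http://localhost:8094/wps?service=WPS&request=GetCapabilities'))
# -> http://localhost:8094/wps
baseurl('ftp://localhost/wps')  # raises ValueError: bad url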
def verify(self):
    """Verify ssl service certificate."""
    value = self.get('verify', 'true')
    if isinstance(value, bool):
        verify = value
    elif value.lower() == 'true':
        verify = True
    elif value.lower() == 'false':
        verify = False
    else:
        verify = value
    return verify

def tag(self, label, message=None):
    """Tag the current workdir state."""
    notify.warning('Unsupported SCM: Make sure you apply the "{}" tag after commit!{}'.format(
        label,
        ' [message={}]'.format(message) if message else '',
    ))

def pep440_dev_version(self, verbose=False, non_local=False):
    """Return a PEP-440 dev version appendix to the main version number."""
    # Always return a timestamp
    pep440 = '.dev{:%Y%m%d%H%M}'.format(datetime.now())
    if not non_local:
        build_number = os.environ.get('BUILD_NUMBER', 'n/a')
        if build_number.isdigit():
            pep440 += '+ci.{}'.format(build_number)
            if verbose:
                notify.info("Adding CI build ID #{} to version".format(build_number))
    return pep440
def get_egg_info(cfg, verbose=False):
    """Call 'setup egg_info' and return the parsed meta-data."""
    result = Bunch()
    setup_py = cfg.rootjoin('setup.py')
    if not os.path.exists(setup_py):
        return result

    egg_info = shell.capture("python {} egg_info".format(setup_py), echo=True if verbose else None)
    for info_line in egg_info.splitlines():
        if info_line.endswith('PKG-INFO'):
            pkg_info_file = info_line.split(None, 1)[1]
            result['__file__'] = pkg_info_file
            with io.open(pkg_info_file, encoding='utf-8') as handle:
                lastkey = None
                for line in handle:
                    if line.lstrip() != line:
                        assert lastkey, "Bad continuation in PKG-INFO file '{}': {}".format(pkg_info_file, line)
                        result[lastkey] += '\n' + line
                    else:
                        lastkey, value = line.split(':', 1)
                        lastkey = lastkey.strip().lower().replace('-', '_')
                        value = value.strip()
                        if lastkey in result:
                            try:
                                result[lastkey].append(value)
                            except AttributeError:
                                result[lastkey] = [result[lastkey], value]
                        else:
                            result[lastkey] = value

    for multikey in PKG_INFO_MULTIKEYS:
        if not isinstance(result.get(multikey, []), list):
            result[multikey] = [result[multikey]]

    return result
def bump(ctx, verbose=False, pypi=False):
    """Bump a development version."""
    cfg = config.load()
    scm = scm_provider(cfg.project_root, commit=False, ctx=ctx)

    # Check for uncommitted changes
    if not scm.workdir_is_clean():
        notify.warning("You have uncommitted changes, will create a time-stamped version!")

    pep440 = scm.pep440_dev_version(verbose=verbose, non_local=pypi)

    # Rewrite 'setup.cfg'  TODO: refactor to helper, see also release-prep
    # with util.rewrite_file(cfg.rootjoin('setup.cfg')) as lines:
    #     ...
    setup_cfg = cfg.rootjoin('setup.cfg')
    if not pep440:
        notify.info("Working directory contains a release version!")
    elif os.path.exists(setup_cfg):
        with io.open(setup_cfg, encoding='utf-8') as handle:
            data = handle.readlines()
        changed = False
        for i, line in enumerate(data):
            if re.match(r"#? *tag_build *= *.*", line):
                verb, _ = data[i].split('=', 1)
                data[i] = '{}= {}\n'.format(verb, pep440)
                changed = True
        if changed:
            notify.info("Rewriting 'setup.cfg'...")
            with io.open(setup_cfg, 'w', encoding='utf-8') as handle:
                handle.write(''.join(data))
        else:
            notify.warning("No 'tag_build' setting found in 'setup.cfg'!")
    else:
        notify.warning("Cannot rewrite 'setup.cfg', none found!")

    if os.path.exists(setup_cfg):
        # Update metadata and print version
        egg_info = shell.capture("python setup.py egg_info", echo=True if verbose else None)
        for line in egg_info.splitlines():
            if line.endswith('PKG-INFO'):
                pkg_info_file = line.split(None, 1)[1]
                with io.open(pkg_info_file, encoding='utf-8') as handle:
                    notify.info('\n'.join(i for i in handle.readlines() if i.startswith('Version:')).strip())
        ctx.run("python setup.py -q develop", echo=True if verbose else None)
def dist(ctx, devpi=False, egg=False, wheel=False, auto=True):
    """Distribute the project."""
    config.load()
    cmd = ["python", "setup.py", "sdist"]

    # Automatically create wheels if possible
    if auto:
        egg = sys.version_info.major == 2
        try:
            import wheel as _
            wheel = True
        except ImportError:
            wheel = False

    if egg:
        cmd.append("bdist_egg")
    if wheel:
        cmd.append("bdist_wheel")

    ctx.run("invoke clean --all build --docs test check")
    ctx.run(' '.join(cmd))
    if devpi:
        ctx.run("devpi upload dist/*")
def pex(ctx, pyrun='', upload=False, opts=''):
    """Package the project with PEX."""
    cfg = config.load()

    # Build and check release
    ctx.run(": invoke clean --all build test check")

    # Get full version
    pkg_info = get_egg_info(cfg)
    # from pprint import pprint; pprint(dict(pkg_info))
    version = pkg_info.version if pkg_info else cfg.project.version

    # Build a PEX for each console entry-point
    pex_files = []
    # from pprint import pprint; pprint(cfg.project.entry_points)
    for script in cfg.project.entry_points['console_scripts']:
        script, entry_point = script.split('=', 1)
        script, entry_point = script.strip(), entry_point.strip()
        pex_file = cfg.rootjoin('bin', '{}-{}.pex'.format(script, version))
        cmd = ['pex', '-r', cfg.rootjoin('requirements.txt'), cfg.project_root, '-c', script, '-o', pex_file]
        if opts:
            cmd.append(opts)
        ctx.run(' '.join(cmd))

        # Warn about non-portable stuff
        non_universal = set()
        with closing(zipfile.ZipFile(pex_file, mode="r")) as pex_contents:
            for pex_name in pex_contents.namelist():  # pylint: disable=no-member
                if pex_name.endswith('WHEEL') and '-py2.py3-none-any.whl' not in pex_name:
                    non_universal.add(pex_name.split('.whl')[0].split('/')[-1])
        if non_universal:
            notify.warning("Non-universal or native wheels in PEX '{}':\n    {}"
                           .format(pex_file.replace(os.getcwd(), '.'), '\n    '.join(sorted(non_universal))))
            envs = [i.split('-')[-3:] for i in non_universal]
            envs = {i[0]: i[1:] for i in envs}
            if len(envs) > 1:
                envs = {k: v for k, v in envs.items() if not k.startswith('py')}
            env_id = []
            for k, v in sorted(envs.items()):
                env_id.append(k)
                env_id.extend(v)
            env_id = '-'.join(env_id)
        else:
            env_id = 'py2.py3-none-any'
        new_pex_file = pex_file.replace('.pex', '-{}.pex'.format(env_id))
        notify.info("Renamed PEX to '{}'".format(os.path.basename(new_pex_file)))
        os.rename(pex_file, new_pex_file)
        pex_file = new_pex_file
        pex_files.append(pex_file)

    if not pex_files:
        notify.warning("No entry points found in project configuration!")
    else:
        if pyrun:
            if any(pyrun.startswith(i) for i in ('http://', 'https://', 'file://')):
                pyrun_url = pyrun
            else:
                pyrun_cfg = dict(ctx.rituals.pyrun)
                pyrun_cfg.update(parse_qsl(pyrun.replace(os.pathsep, '&')))
                pyrun_url = (pyrun_cfg['base_url'] + '/' + pyrun_cfg['archive']).format(**pyrun_cfg)
            notify.info("Getting PyRun from '{}'...".format(pyrun_url))
            with url_as_file(pyrun_url, ext='tgz') as pyrun_tarball:
                pyrun_tar = tarfile.TarFile.gzopen(pyrun_tarball)
                for pex_file in pex_files[:]:
                    pyrun_exe = pyrun_tar.extractfile('./bin/pyrun')
                    with open(pex_file, 'rb') as pex_handle:
                        pyrun_pex_file = '{}{}-installer.sh'.format(
                            pex_file[:-4], pyrun_url.rsplit('/egenix')[-1][:-4])
                        with open(pyrun_pex_file, 'wb') as pyrun_pex:
                            pyrun_pex.write(INSTALLER_BASH.replace('00000', '{:<5d}'.format(len(INSTALLER_BASH) + 1)))
                            shutil.copyfileobj(pyrun_exe, pyrun_pex)
                            shutil.copyfileobj(pex_handle, pyrun_pex)
                        shutil.copystat(pex_file, pyrun_pex_file)
                        notify.info("Wrote PEX installer to '{}'".format(pretty_path(pyrun_pex_file)))
                        pex_files.append(pyrun_pex_file)

        if upload:
            base_url = ctx.rituals.release.upload.base_url.rstrip('/')
            if not base_url:
                notify.failure("No base URL provided for uploading!")
            for pex_file in pex_files:
                url = base_url + '/' + ctx.rituals.release.upload.path.lstrip('/').format(
                    name=cfg.project.name, version=cfg.project.version,
                    filename=os.path.basename(pex_file))
                notify.info("Uploading to '{}'...".format(url))
                with io.open(pex_file, 'rb') as handle:
                    reply = requests.put(url, data=handle.read())
                if reply.status_code in range(200, 300):
                    notify.info("{status_code} {reason}".format(**vars(reply)))
                else:
                    notify.warning("{status_code} {reason}".format(**vars(reply)))
def prep(ctx, commit=True):
    """Prepare for a release."""
    cfg = config.load()
    scm = scm_provider(cfg.project_root, commit=commit, ctx=ctx)

    # Check for uncommitted changes
    if not scm.workdir_is_clean():
        notify.failure("You have uncommitted changes, please commit or stash them!")
    # TODO Check that changelog entry carries the current date

    # Rewrite 'setup.cfg'
    setup_cfg = cfg.rootjoin('setup.cfg')
    if os.path.exists(setup_cfg):
        with io.open(setup_cfg, encoding='utf-8') as handle:
            data = handle.readlines()
        changed = False
        for i, line in enumerate(data):
            if any(line.startswith(i) for i in ('tag_build', 'tag_date')):
                data[i] = '#' + data[i]
                changed = True
        if changed and commit:
            notify.info("Rewriting 'setup.cfg'...")
            with io.open(setup_cfg, 'w', encoding='utf-8') as handle:
                handle.write(''.join(data))
            scm.add_file('setup.cfg')
        elif changed:
            notify.warning("WOULD rewrite 'setup.cfg', but --no-commit was passed")
    else:
        notify.warning("Cannot rewrite 'setup.cfg', none found!")

    # Update metadata and command stubs
    ctx.run('python setup.py -q develop -U')

    # Build a clean dist and check version number
    version = capture('python setup.py --version')
    ctx.run('invoke clean --all build --docs release.dist')
    for distfile in os.listdir('dist'):
        trailer = distfile.split('-' + version)[1]
        trailer, _ = os.path.splitext(trailer)
        if trailer and trailer[0] not in '.-':
            notify.failure("The version found in 'dist' seems to be"
                           " a pre-release one! [{}{}]".format(version, trailer))

    # Commit changes and tag the release
    scm.commit(ctx.rituals.release.commit.message.format(version=version))
    scm.tag(ctx.rituals.release.tag.name.format(version=version),
            ctx.rituals.release.tag.message.format(version=version))
def pylint(ctx, skip_tests=False, skip_root=False, reports=False):
    """Perform source code checks via pylint."""
    cfg = config.load()
    add_dir2pypath(cfg.project_root)
    if not os.path.exists(cfg.testjoin('__init__.py')):
        add_dir2pypath(cfg.testjoin())

    namelist = set()
    for package in cfg.project.get('packages', []):
        if '.' not in package:
            namelist.add(cfg.srcjoin(package))
    for module in cfg.project.get('py_modules', []):
        namelist.add(module + '.py')

    if not skip_tests:
        test_py = antglob.FileSet(cfg.testdir, '**/*.py')
        test_py = [cfg.testjoin(i) for i in test_py]
        if test_py:
            namelist |= set(test_py)

    if not skip_root:
        root_py = antglob.FileSet('.', '*.py')
        if root_py:
            namelist |= set(root_py)

    namelist = set([i[len(os.getcwd()) + 1:] if i.startswith(os.getcwd() + os.sep) else i for i in namelist])
    cmd = 'pylint'
    cmd += ' "{}"'.format('" "'.join(sorted(namelist)))
    cmd += ' --reports={0}'.format('y' if reports else 'n')
    for cfgfile in ('.pylintrc', 'pylint.rc', 'pylint.cfg', 'project.d/pylint.cfg'):
        if os.path.exists(cfgfile):
            cmd += ' --rcfile={0}'.format(cfgfile)
            break
    try:
        shell.run(cmd, report_error=False, runner=ctx.run)
        notify.info("OK - No problems found by pylint.")
    except exceptions.Failure as exc:
        # Check bit flags within pylint return code
        if exc.result.return_code & 32:
            # Usage error (internal error in this code)
            notify.error("Usage error, bad arguments in {}?!".format(repr(cmd)))
            raise
        else:
            bits = {
                1: "fatal",
                2: "error",
                4: "warning",
                8: "refactor",
                16: "convention",
            }
            notify.warning("Some messages of type {} issued by pylint.".format(
                ", ".join([text for bit, text in bits.items() if exc.result.return_code & bit])
            ))
            if exc.result.return_code & 3:
                notify.error("Exiting due to fatal / error message.")
                raise
def workdir_is_clean(self, quiet=False):
    """
    Check for uncommitted changes, return `True` if everything is clean.

    Inspired by http://stackoverflow.com/questions/3878624/.
    """
    # Update the index
    self.run('git update-index -q --ignore-submodules --refresh', **RUN_KWARGS)
    unchanged = True

    # Disallow unstaged changes in the working tree
    try:
        self.run('git diff-files --quiet --ignore-submodules --', report_error=False, **RUN_KWARGS)
    except exceptions.Failure:
        unchanged = False
        if not quiet:
            notify.warning('You have unstaged changes!')
            self.run('git diff-files --name-status -r --ignore-submodules -- >&2', **RUN_KWARGS)

    # Disallow uncommitted changes in the index
    try:
        self.run('git diff-index --cached --quiet HEAD --ignore-submodules --', report_error=False, **RUN_KWARGS)
    except exceptions.Failure:
        unchanged = False
        if not quiet:
            notify.warning('Your index contains uncommitted changes!')
            self.run('git diff-index --cached --name-status -r --ignore-submodules HEAD -- >&2', **RUN_KWARGS)

    return unchanged

def tag(self, label, message=None):
    """Tag the current workdir state."""
    options = ' -m "{}" -a'.format(message) if message else ''
    self.run_elective('git tag{} "{}"'.format(options, label))
def pep440_dev_version(self, verbose=False, non_local=False):
    """
    Return a PEP-440 dev version appendix to the main version number.

    Result is ``None`` if the workdir is in a release-ready state
    (i.e. clean and properly tagged).
    """
    version = capture("python setup.py --version", echo=verbose)
    if verbose:
        notify.info("setuptools version = '{}'".format(version))

    now = '{:%Y%m%d!%H%M}'.format(datetime.now())
    tag = capture("git describe --long --tags --dirty='!{}'".format(now), echo=verbose)
    if verbose:
        notify.info("git describe = '{}'".format(tag))
    try:
        tag, date, time = tag.split('!')
    except ValueError:
        date = time = ''
    tag, commits, short_hash = tag.rsplit('-', 2)  # maxsplit=2, so tags containing '-' stay intact
    label = tag
    if re.match(r"v[0-9]+(\.[0-9]+)*", label):
        label = label[1:]

    # Make a PEP-440 version appendix, the format is:
    # [N!]N(.N)*[{a|b|rc}N][.postN][.devN][+<local version label>]
    if commits == '0' and label == version:
        pep440 = None
    else:
        local_part = [
            re.sub(r"[^a-zA-Z0-9]+", '.', label).strip('.'),  # reduce to alphanum and dots
            short_hash,
            date + ('T' + time if time else ''),
        ]
        build_number = os.environ.get('BUILD_NUMBER', 'n/a')
        if build_number.isdigit():
            local_part.extend(['ci', build_number])
            if verbose:
                notify.info("Adding CI build ID #{} to version".format(build_number))
        local_part = [i for i in local_part if i]
        pep440 = '.dev{}+{}'.format(commits, '.'.join(local_part).strip('.'))
        if non_local:
            pep440, _ = pep440.split('+', 1)

    return pep440
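For a concrete feel of the output, here is the string handling above traced by hand on a plausible `git describe` result (the tag and hash are invented):

# 'v1.2.0-3-g1a2b3c!20240101!1200' = 3 commits past v1.2.0, dirty at 2024-01-01 12:00
tag, date, time = 'v1.2.0-3-g1a2b3c!20240101!1200'.split('!')
tag, commits, short_hash = tag.rsplit('-', 2)
label = tag[1:]  # strip the leading 'v'
print('.dev{}+{}'.format(commits, '.'.join([label, short_hash, date + 'T' + time])))
# -> .dev3+1.2.0.g1a2b3c.20240101T1200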
def description(_dummy_ctx, markdown=False):
    """Dump project metadata for Jenkins Description Setter Plugin."""
    cfg = config.load()
    markup = 'md' if markdown else 'html'
    description_file = cfg.rootjoin("build/project.{}".format(markup))
    notify.banner("Creating {} file for Jenkins...".format(description_file))

    long_description = cfg.project.long_description
    long_description = long_description.replace('\n\n', '</p>\n<p>')
    long_description = re.sub(r'(\W)``([^`]+)``(\W)', r'\1<tt>\2</tt>\3', long_description)

    text = DESCRIPTION_TEMPLATES[markup].format(
        keywords=', '.join(cfg.project.keywords),
        classifiers='\n'.join(cfg.project.classifiers),
        classifiers_indented='    ' + '\n    '.join(cfg.project.classifiers),
        packages=', '.join(cfg.project.packages),
        long_description_html='<p>{}</p>'.format(long_description),
        ##data='\n'.join(["%s=%r" % i for i in cfg.project.iteritems()]),
        **cfg)
    with io.open(description_file, 'w', encoding='utf-8') as handle:
        handle.write(text)
def capture(cmd, **kw):
    """Run a command and return its stripped captured output."""
    kw = kw.copy()
    kw['hide'] = 'out'
    if not kw.get('echo', False):
        kw['echo'] = False
    ignore_failures = kw.pop('ignore_failures', False)
    try:
        return invoke_run(cmd, **kw).stdout.strip()
    except exceptions.Failure as exc:
        if not ignore_failures:
            notify.error("Command `{}` failed with RC={}!".format(cmd, exc.result.return_code))
        raise

def run(cmd, **kw):
    """Run a command and flush its output."""
    kw = kw.copy()
    kw.setdefault('warn', False)  # make extra sure errors don't get silenced
    report_error = kw.pop('report_error', True)
    runner = kw.pop('runner', invoke_run)
    try:
        return runner(cmd, **kw)
    except exceptions.Failure as exc:
        sys.stdout.flush()
        sys.stderr.flush()
        if report_error:
            notify.error("Command `{}` failed with RC={}!".format(cmd, exc.result.return_code))
        raise
    finally:
        sys.stdout.flush()
        sys.stderr.flush()
def auto_detect(workdir):
    """
    Return string signifying the SCM used in the given directory.

    Currently, 'git' is supported. Anything else returns 'unknown'.
    """
    # Any additions here also need a change to `SCM_PROVIDERS`!
    if os.path.isdir(os.path.join(workdir, '.git')) and os.path.isfile(os.path.join(workdir, '.git', 'HEAD')):
        return 'git'
    return 'unknown'

def provider(workdir, commit=True, **kwargs):
    """Factory for the correct SCM provider in `workdir`."""
    return SCM_PROVIDER[auto_detect(workdir)](workdir, commit=commit, **kwargs)

def fail(message, exitcode=1):
    """Exit with error code and message."""
    sys.stderr.write('ERROR: {}\n'.format(message))
    sys.stderr.flush()
    sys.exit(exitcode)

def get_pypi_auth(configfile='~/.pypirc'):
    """Read auth from pip config."""
    pypi_cfg = ConfigParser()
    if pypi_cfg.read(os.path.expanduser(configfile)):
        try:
            user = pypi_cfg.get('pypi', 'username')
            pwd = pypi_cfg.get('pypi', 'password')
            return user, pwd
        except ConfigError:
            notify.warning("No PyPI credentials in '{}',"
                           " will fall back to '~/.netrc'...".format(configfile))
    return None
def watchdogctl(ctx, kill=False, verbose=True):
    """Control / check a running Sphinx autobuild process."""
    tries = 40 if kill else 0
    cmd = 'lsof -i TCP:{} -s TCP:LISTEN -S -Fp 2>/dev/null'.format(ctx.rituals.docs.watchdog.port)

    pidno = 0
    pidinfo = capture(cmd, ignore_failures=True)
    while pidinfo:
        pidline = next(filter(None, (re.match(r'^p(\d+)$', x) for x in pidinfo.splitlines())), None)
        if not pidline:
            raise ValueError("Standard lsof output expected (got {!r})".format(pidinfo))
        pidno = int(pidline.group(1), 10)
        if verbose:
            ctx.run("ps uw {}".format(pidno), echo=False)
            verbose = False

        tries -= 1
        if tries <= 0:
            break
        else:
            try:
                os.kill(pidno, 0)
            # except ProcessLookupError:  # XXX Python3 only
            #     break
            except OSError as exc:  # Python2 has no ProcessLookupError
                if exc.errno == 3:
                    break
                raise
            else:
                notify.info("Killing PID {}".format(pidno))
                ctx.run("kill {}".format(pidno), echo=False)
                time.sleep(.25)

        pidinfo = capture(cmd, ignore_failures=True)  # re-poll, so the loop can terminate

    return pidno
def sphinx(ctx, browse=False, clean=False, watchdog=False, kill=False, status=False, opts=''):
    """Build Sphinx docs."""
    cfg = config.load()

    if kill or status:
        if not watchdogctl(ctx, kill=kill):
            notify.info("No process bound to port {}".format(ctx.rituals.docs.watchdog.port))
        return

    if clean:
        ctx.run("invoke clean --docs")

    # Convert markdown files, if applicable
    for basename in ('README', 'CONTRIBUTING'):
        markdown = cfg.rootjoin(basename + '.md')
        if os.path.exists(markdown):
            try:
                import pypandoc
            except ImportError as exc:
                notify.warning("Can't import 'pandoc' ({})".format(exc))
                break
            else:
                pypandoc.convert(markdown, 'rst',
                                 outputfile=os.path.join(ctx.rituals.docs.sources, basename + '.rst'))

    # LICENSE file
    if os.path.exists('LICENSE'):
        with io.open('LICENSE', 'r') as inp:
            license_text = inp.read()
        try:
            _, copyright_text = cfg.project['long_description'].split('Copyright', 1)
        except (KeyError, ValueError):
            copyright_text = cfg.project.get('license', 'N/A')
        with io.open(os.path.join(ctx.rituals.docs.sources, 'LICENSE.rst'), 'w') as out:
            out.write(
                'Software License\n'
                '================\n'
                '\n'
                '    Copyright {}\n'
                '\n'
                'Full License Text\n'
                '-----------------\n'
                '\n'
                '::\n'
                '\n'
                .format(copyright_text)
            )
            license_text = textwrap.dedent(license_text)
            license_text = '\n    '.join(license_text.splitlines())
            out.write('    {}\n'.format(license_text))

    # Build API docs
    if cfg.project.get('packages') and str(ctx.rituals.docs.apidoc).lower()[:1] in 't1y':
        cmd = ['sphinx-apidoc', '-o', 'api', '-f', '-M']
        for package in cfg.project.packages:
            if '.' not in package:
                cmd.append(cfg.srcjoin(package))
        with pushd(ctx.rituals.docs.sources):
            ctx.run(' '.join(cmd))

    # Auto build?
    cmd = ['sphinx-build', '-b', 'html']
    if opts:
        cmd.append(opts)
    cmd.extend(['.', ctx.rituals.docs.build])
    index_url = index_file = os.path.join(ctx.rituals.docs.sources, ctx.rituals.docs.build, 'index.html')
    if watchdog:
        watchdogctl(ctx, kill=True)
        cmd[0:1] = ['nohup', 'sphinx-autobuild']
        cmd.extend([
            '-H', ctx.rituals.docs.watchdog.host,
            '-p', '{}'.format(ctx.rituals.docs.watchdog.port),
            "-i'{}'".format('*~'),
            "-i'{}'".format('.*'),
            "-i'{}'".format('*.log'),
            ">watchdog.log", "2>&1", "&",
        ])
        index_url = "http://{}:{}/".format(ctx.rituals.docs.watchdog.host, ctx.rituals.docs.watchdog.port)

    # Build docs
    notify.info("Starting Sphinx {}build...".format('auto' if watchdog else ''))
    with pushd(ctx.rituals.docs.sources):
        ctx.run(' '.join(cmd), pty=not watchdog)

    # Wait for watchdog to bind to listening port
    if watchdog:
        def activity(what=None, i=None):
            "Helper"
            if i is None:
                sys.stdout.write(what + '\n')
            else:
                sys.stdout.write('  {} Waiting for {}\r'.format(r'\|/-'[i % 4], what or 'something'))
            sys.stdout.flush()

        for i in range(60):
            activity('server start', i)
            if watchdogctl(ctx):
                activity('OK')
                break
            time.sleep(1)
        else:
            activity('ERR')

        # trigger first build
        if os.path.exists(os.path.join(ctx.rituals.docs.sources, 'index.rst')):
            os.utime(os.path.join(ctx.rituals.docs.sources, 'index.rst'), None)
        for i in range(60):
            activity('HTML index file', i)
            if os.path.exists(index_file):
                activity('OK')
                break
            time.sleep(1)
        else:
            activity('ERR')

    # Open in browser?
    if browse:
        time.sleep(1)
        webbrowser.open_new_tab(index_url)
def confluence(ctx, no_publish=False, clean=False, opts=''):
    """Build Sphinx docs and publish to Confluence."""
    cfg = config.load()

    if clean:
        ctx.run("invoke clean --docs")
    cmd = ['sphinx-build', '-b', 'confluence']
    cmd.extend(['-E', '-a'])  # force a full rebuild
    if opts:
        cmd.append(opts)
    cmd.extend(['.', ctx.rituals.docs.build + '_cf'])
    if no_publish:
        cmd.extend(['-Dconfluence_publish=False'])

    # Build docs
    notify.info("Starting Sphinx build...")
    with pushd(ctx.rituals.docs.sources):
        ctx.run(' '.join(cmd), pty=True)

def upload(ctx, browse=False, target=None, release='latest'):
    """Upload a ZIP of built docs (by default to PyPI, else a WebDAV URL)."""
    cfg = config.load()
    uploader = DocsUploader(ctx, cfg, target)

    html_dir = os.path.join(ctx.rituals.docs.sources, ctx.rituals.docs.build)
    if not os.path.isdir(html_dir):
        notify.failure("No HTML docs dir found at '{}'!".format(html_dir))

    url = uploader.upload(html_dir, release)
    notify.info("Uploaded docs to '{url}'!".format(url=url or 'N/A'))
    if url and browse:  # Open in browser?
        webbrowser.open_new_tab(url)
@contextmanager
def _zipped(self, docs_base):
    """Provide a zipped stream of the docs tree."""
    with pushd(docs_base):
        with tempfile.NamedTemporaryFile(prefix='pythonhosted-', delete=False) as ziphandle:
            pass
        zip_name = shutil.make_archive(ziphandle.name, 'zip')

    notify.info("Uploading {:.1f} MiB from '{}' to '{}'..."
                .format(os.path.getsize(zip_name) / 1024.0 ** 2, zip_name, self.target))
    with io.open(zip_name, 'rb') as zipread:
        try:
            yield zipread
        finally:
            os.remove(ziphandle.name)
            os.remove(ziphandle.name + '.zip')
def _to_pypi(self, docs_base, release):
    """Upload to PyPI."""
    url = None
    with self._zipped(docs_base) as handle:
        reply = requests.post(self.params['url'], auth=get_pypi_auth(), allow_redirects=False,
                              files=dict(content=(self.cfg.project.name + '.zip', handle, 'application/zip')),
                              data={':action': 'doc_upload', 'name': self.cfg.project.name})
        if reply.status_code in range(200, 300):
            notify.info("{status_code} {reason}".format(**vars(reply)))
        elif reply.status_code == 301:
            url = reply.headers['location']
        else:
            data = self.cfg.copy()
            data.update(self.params)
            data.update(vars(reply))
            notify.error("{status_code} {reason} for POST to {url}".format(**data))
    return url

def _to_webdav(self, docs_base, release):
    """Upload to WebDAV store."""
    try:
        git_path = subprocess.check_output('git remote get-url origin 2>/dev/null', shell=True)
    except subprocess.CalledProcessError:
        git_path = ''
    else:
        git_path = git_path.decode('ascii').strip()
        git_path = git_path.replace('http://', '').replace('https://', '').replace('ssh://', '')
        git_path = re.search(r'[^:/]+?[:/](.+)', git_path)
        git_path = git_path.group(1).replace('.git', '') if git_path else ''

    url = None
    with self._zipped(docs_base) as handle:
        url_ns = dict(name=self.cfg.project.name, version=release, git_path=git_path)
        reply = requests.put(self.params['url'].format(**url_ns),
                             data=handle.read(), headers={'Accept': 'application/json'})
        if reply.status_code in range(200, 300):
            notify.info("{status_code} {reason}".format(**vars(reply)))
            try:
                data = reply.json()
            except ValueError as exc:
                notify.warning("Didn't get a JSON response! ({})".format(exc))
            else:
                if 'downloadUri' in data:  # Artifactory
                    url = data['downloadUri'] + '!/index.html'
        elif reply.status_code == 301:
            url = reply.headers['location']
        else:
            data = self.cfg.copy()
            data.update(self.params)
            data.update(vars(reply))
            notify.error("{status_code} {reason} for PUT to {url}".format(**data))
    if not url:
        notify.warning("Couldn't get URL from upload response!")
    return url

def upload(self, docs_base, release):
    """Upload docs in ``docs_base`` to the target of this uploader."""
    return getattr(self, '_to_' + self.target)(docs_base, release)
def search_file_upwards(name, base=None):
    """
    Search for a file named `name` from cwd or given directory to root.

    Return None if nothing's found.
    """
    base = base or os.getcwd()
    while base != os.path.dirname(base):
        if os.path.exists(os.path.join(base, name)):
            return base
        base = os.path.dirname(base)
    return None

def add_dir2pypath(path):
    """Add given directory to PYTHONPATH, e.g. for pylint."""
    py_path = os.environ.get('PYTHONPATH', '')
    if path not in py_path.split(os.pathsep):
        py_path = ''.join([path, os.pathsep if py_path else '', py_path])
        os.environ['PYTHONPATH'] = py_path
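Typical use is locating a project root from somewhere inside the tree (assuming a setup.py exists in an ancestor directory):

root = search_file_upwards('setup.py')
if root:
    print('project root:', root)
else:
    print('not inside a project checkout')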
@contextmanager
def pushd(path):
    """
    A context that enters a given directory and restores the old state on exit.

    The original directory is returned as the context variable.
    """
    saved = os.getcwd()
    os.chdir(path)
    try:
        yield saved
    finally:
        os.chdir(saved)
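Usage is straightforward; the yielded value is the directory you came from:

with pushd('/tmp') as origin:
    print('working in /tmp, came from', origin)
# back in the original directory here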
@contextmanager
def url_as_file(url, ext=None):
    """
    Context manager that GETs a given `url` and provides it as a local file.

    The file is in a closed state upon entering the context,
    and removed when leaving it, if still there.

    To give the file name a specific extension, use `ext`;
    the extension can optionally include a separating dot,
    otherwise it will be added.

    Parameters:
        url (str): URL to retrieve.
        ext (str, optional): Extension for the generated filename.

    Yields:
        str: The path to a temporary file with the content of the URL.

    Raises:
        requests.RequestException: Base exception of ``requests``,
            see its docs for more detailed ones.

    Example:
        >>> import io, re, json
        >>> with url_as_file('https://api.github.com/meta', ext='json') as meta:
        ...     meta, json.load(io.open(meta, encoding='ascii'))['hooks']
        (u'/tmp/www-api.github.com-Ba5OhD.json', [u'192.30.252.0/22'])
    """
    if ext:
        ext = '.' + ext.strip('.')  # normalize extension
    url_hint = 'www-{}-'.format(urlparse(url).hostname or 'any')

    if url.startswith('file://'):
        url = os.path.abspath(url[len('file://'):])
    if os.path.isabs(url):
        with open(url, 'rb') as handle:
            content = handle.read()
    else:
        content = requests.get(url).content

    with tempfile.NamedTemporaryFile(suffix=ext or '', prefix=url_hint, delete=False) as handle:
        handle.write(content)
    try:
        yield handle.name
    finally:
        if os.path.exists(handle.name):
            os.remove(handle.name)
def run(self, cmd, *args, **kwargs):
    """Run a command."""
    runner = self.ctx.run if self.ctx else None
    return run(cmd, runner=runner, *args, **kwargs)

def run_elective(self, cmd, *args, **kwargs):
    """Run a command, or just echo it, depending on `commit`."""
    if self._commit:
        return self.run(cmd, *args, **kwargs)
    else:
        notify.warning("WOULD RUN: {}".format(cmd))
        kwargs = kwargs.copy()
        kwargs['echo'] = False
        return self.run('true', *args, **kwargs)
def banner(msg):
    """Emit a banner just like Invoke's `run(…, echo=True)`."""
    if ECHO:
        _flush()
        sys.stderr.write("\033[1;7;32;40m{}\033[0m\n".format(msg))
        sys.stderr.flush()

def info(msg):
    """Emit a normal message."""
    _flush()
    sys.stdout.write(msg + '\n')
    sys.stdout.flush()

def warning(msg):
    """Emit a warning message."""
    _flush()
    sys.stderr.write("\033[1;7;33;40mWARNING: {}\033[0m\n".format(msg))
    sys.stderr.flush()

def error(msg):
    """Emit an error message to stderr."""
    _flush()
    sys.stderr.write("\033[1;37;41mERROR: {}\033[0m\n".format(msg))
    sys.stderr.flush()
def get_devpi_url(ctx):
    """Get currently used 'devpi' base URL."""
    cmd = 'devpi use --urls'
    lines = ctx.run(cmd, hide='out', echo=False).stdout.splitlines()
    for line in lines:
        try:
            line, base_url = line.split(':', 1)
        except ValueError:
            notify.warning('Ignoring "{}"!'.format(line))
        else:
            if line.split()[-1].strip() == 'simpleindex':
                return base_url.split('\x1b')[0].strip().rstrip('/')

    raise LookupError("Cannot find simpleindex URL in '{}' output:\n    {}".format(
        cmd, '\n    '.join(lines),
    ))
def get_project_root():
    """Determine location of `tasks.py`."""
    try:
        tasks_py = sys.modules['tasks']
    except KeyError:
        return None
    else:
        return os.path.abspath(os.path.dirname(tasks_py.__file__))

def load():
    """
    Load and return configuration as a ``Bunch``.

    Values are based on ``DEFAULTS``, and metadata from ``setup.py``.
    """
    cfg = Bunch(DEFAULTS)
    # TODO: override with contents of [rituals] section in setup.cfg

    cfg.project_root = get_project_root()
    if not cfg.project_root:
        raise RuntimeError("No tasks module is imported, cannot determine project root")

    cfg.rootjoin = lambda *names: os.path.join(cfg.project_root, *names)
    cfg.srcjoin = lambda *names: cfg.rootjoin(cfg.srcdir, *names)
    cfg.testjoin = lambda *names: cfg.rootjoin(cfg.testdir, *names)
    cfg.cwd = os.getcwd()
    os.chdir(cfg.project_root)

    # this assumes an importable setup.py
    # TODO: maybe call "python setup.py egg_info" for metadata
    if cfg.project_root not in sys.path:
        sys.path.append(cfg.project_root)
    try:
        from setup import project  # pylint: disable=no-name-in-module
    except ImportError:
        from setup import setup_args as project  # pylint: disable=no-name-in-module
    cfg.project = Bunch(project)

    return cfg
def glob2re(part):
    """Convert a path part to regex syntax."""
    return "[^/]*".join(
        re.escape(bit).replace(r'\[\^', '[^').replace(r'\[', '[').replace(r'\]', ']')
        for bit in part.split("*")
    )

def parse_glob(pattern):
    """Generate parts of regex transformed from glob pattern."""
    if not pattern:
        return
    bits = pattern.split("/")
    dirs, filename = bits[:-1], bits[-1]
    for dirname in dirs:
        if dirname == "**":
            yield "(|.+/)"
        else:
            yield glob2re(dirname) + "/"
    yield glob2re(filename)

def compile_glob(spec):
    """Convert the given glob `spec` to a compiled regex."""
    parsed = "".join(parse_glob(spec))
    regex = "^{0}$".format(parsed)
    return re.compile(regex)
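Together these three functions turn ant-style globs into anchored regexes; note that `**` also matches zero directories:

regex = compile_glob('src/**/*.py')
assert regex.match('src/pkg/mod.py')
assert regex.match('src/mod.py')        # '**' matched nothing here
assert not regex.match('docs/conf.py')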
def included(self, path, is_dir=False):
    """
    Check patterns in order, last match that includes or excludes `path` wins.

    Return `None` on undecided.
    """
    inclusive = None
    for pattern in self.patterns:
        if pattern.is_dir == is_dir and pattern.matches(path):
            inclusive = pattern.inclusive
    # print('+++' if inclusive else '---', path, pattern)
    return inclusive

def walk(self, **kwargs):
    """
    Like `os.walk` and taking the same keyword arguments,
    but generating paths relative to the root.

    Starts in the fileset's root and filters based on its patterns.

    If ``with_root=True`` is passed in, the generated paths include
    the root path.
    """
    lead = ''
    if 'with_root' in kwargs and kwargs.pop('with_root'):
        lead = self.root.rstrip(os.sep) + os.sep

    for base, dirs, files in os.walk(self.root, **kwargs):
        prefix = base[len(self.root):].lstrip(os.sep)
        bits = prefix.split(os.sep) if prefix else []
        for dirname in dirs[:]:
            path = '/'.join(bits + [dirname])
            inclusive = self.included(path, is_dir=True)
            if inclusive:
                yield lead + path + '/'
            elif inclusive is False:
                dirs.remove(dirname)
        for filename in files:
            path = '/'.join(bits + [filename])
            if self.included(path):
                yield lead + path
def build(ctx, dput='', opts=''):
    """Build a DEB package."""
    # Get package metadata
    with io.open('debian/changelog', encoding='utf-8') as changes:
        metadata = re.match(r'^([^ ]+) \(([^)]+)\) ([^;]+); urgency=(.+)$', changes.readline().rstrip())
        if not metadata:
            notify.failure('Badly formatted top entry in changelog')
        name, version, _, _ = metadata.groups()

    # Build package
    ctx.run('dpkg-buildpackage {} {}'.format(ctx.rituals.deb.build.opts, opts))

    # Move created artifacts into "dist"
    if not os.path.exists('dist'):
        os.makedirs('dist')
    artifact_pattern = '{}?{}*'.format(name, re.sub(r'[^-_.a-zA-Z0-9]', '?', version))
    changes_files = []
    for debfile in glob.glob('../' + artifact_pattern):
        shutil.move(debfile, 'dist')
        if debfile.endswith('.changes'):
            changes_files.append(os.path.join('dist', os.path.basename(debfile)))
    ctx.run('ls -l dist/{}'.format(artifact_pattern))

    if dput:
        ctx.run('dput {} {}'.format(dput, ' '.join(changes_files)))
def clean(_dummy_ctx, docs=False, backups=False, bytecode=False, dist=False,  # pylint: disable=redefined-outer-name
          all=False, venv=False, tox=False, extra=''):  # pylint: disable=redefined-builtin
    """Perform house-keeping."""
    cfg = config.load()
    notify.banner("Cleaning up project files")

    # Add patterns based on given parameters
    venv_dirs = ['bin', 'include', 'lib', 'share', 'local', '.venv']
    patterns = ['build/', 'pip-selfcheck.json']
    excludes = ['.git/', '.hg/', '.svn/', 'debian/*/']
    if docs or all:
        patterns.extend(['docs/_build/', 'doc/_build/'])
    if dist or all:
        patterns.append('dist/')
    if backups or all:
        patterns.extend(['**/*~'])
    if bytecode or all:
        patterns.extend([
            '**/*.py[co]', '**/__pycache__/', '*.egg-info/',
            cfg.srcjoin('*.egg-info/')[len(cfg.project_root) + 1:],
        ])
    if venv:
        patterns.extend([i + '/' for i in venv_dirs])
    if tox:
        patterns.append('.tox/')
    else:
        excludes.append('.tox/')
    if extra:
        patterns.extend(shlex.split(extra))

    # Build fileset
    patterns = [antglob.includes(i) for i in patterns] + [antglob.excludes(i) for i in excludes]
    if not venv:
        # Do not scan venv dirs when not cleaning them
        patterns.extend([antglob.excludes(i + '/') for i in venv_dirs])
    fileset = antglob.FileSet(cfg.project_root, patterns)

    # Iterate over matches and remove them
    for name in fileset:
        notify.info('rm {0}'.format(name))
        if name.endswith('/'):
            shutil.rmtree(os.path.join(cfg.project_root, name))
        else:
            os.unlink(os.path.join(cfg.project_root, name))
def build(ctx, docs=False):
    """Build the project."""
    cfg = config.load()
    ctx.run("python setup.py build")

    if docs:
        for doc_path in ('docs', 'doc'):
            if os.path.exists(cfg.rootjoin(doc_path, 'conf.py')):
                break
        else:
            doc_path = None

        if doc_path:
            ctx.run("invoke docs")
        else:
            notify.warning("Cannot find either a 'docs' or 'doc' Sphinx directory!")

def freeze(ctx, local=False):
    """Freeze currently installed requirements."""
    cmd = 'pip --disable-pip-version-check freeze{}'.format(' --local' if local else '')
    frozen = ctx.run(cmd, hide='out').stdout.replace('\x1b', '#')
    with io.open('frozen-requirements.txt', 'w', encoding='ascii') as out:
        out.write("# Requirements frozen by 'pip freeze' on {}\n".format(isodate()))
        out.write(frozen)
    notify.info("Frozen {} requirements.".format(len(frozen.splitlines())))

def isodate(datestamp=None, microseconds=False):
    """Return current or given time formatted according to ISO-8601."""
    datestamp = datestamp or datetime.datetime.now()
    if not microseconds:
        usecs = datetime.timedelta(microseconds=datestamp.microsecond)
        datestamp = datestamp - usecs
    return datestamp.isoformat(b' ' if PY2 else u' ')
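Sample output for a fixed timestamp (run under Python 3, where the separator is the text ' '):

import datetime

stamp = datetime.datetime(2024, 1, 2, 3, 4, 5, 678901)
print(isodate(stamp))                     # 2024-01-02 03:04:05
print(isodate(stamp, microseconds=True))  # 2024-01-02 03:04:05.678901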
def _get_registered_executable(exe_name):
    """Windows allows application paths to be registered in the registry."""
    registered = None
    if sys.platform.startswith('win'):
        if os.path.splitext(exe_name)[1].lower() != '.exe':
            exe_name += '.exe'
        import _winreg  # pylint: disable=import-error
        try:
            key = "SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\App Paths\\" + exe_name
            value = _winreg.QueryValue(_winreg.HKEY_LOCAL_MACHINE, key)
            registered = (value, "from HKLM\\" + key)
        except _winreg.error:
            pass
        if registered and not os.path.exists(registered[0]):
            registered = None
    return registered
def _cull(potential, matches, verbose=0):
    """
    Cull inappropriate matches. Possible reasons:
        - a duplicate of a previous match
        - not a disk file
        - not executable (non-Windows)

    If 'potential' is approved it is returned and added to 'matches'.
    Otherwise, None is returned.
    """
    for match in matches:  # don't yield duplicates
        if _samefile(potential[0], match[0]):
            if verbose:
                sys.stderr.write("duplicate: %s (%s)\n" % potential)
            return None
    if not stat.S_ISREG(os.stat(potential[0]).st_mode):
        if verbose:
            sys.stderr.write("not a regular file: %s (%s)\n" % potential)
    elif not os.access(potential[0], os.X_OK):
        if verbose:
            sys.stderr.write("no executable access: %s (%s)\n" % potential)
    else:
        matches.append(potential)
        return potential
    return None
def whichgen(command, path=None, verbose=0, exts=None):  # pylint: disable=too-many-branches, too-many-statements
    """
    Return a generator of full paths to the given command.

    "command" is the name of the executable to search for.
    "path" is an optional alternate path list to search. The default is
        to use the PATH environment variable.
    "verbose", if true, will cause a 2-tuple to be returned for each match.
        The second element is a textual description of where the match
        was found.
    "exts" optionally allows one to specify a list of extensions to use
        instead of the standard list for this system. This can effectively
        be used as an optimization to, for example, avoid stat's of "foo.vbs"
        when searching for "foo" and you know it is not a VisualBasic script
        but ".vbs" is on PATHEXT. This option is only supported on Windows.

    This method returns a generator which yields either full paths to the
    given command or, if verbose, tuples of the form
    (<path to command>, <where path found>).
    """
    matches = []
    if path is None:
        using_given_path = 0
        path = os.environ.get("PATH", "").split(os.pathsep)
        if sys.platform.startswith("win"):
            path.insert(0, os.curdir)  # implied by Windows shell
    else:
        using_given_path = 1

    # Windows has the concept of a list of extensions (PATHEXT env var).
    if sys.platform.startswith("win"):
        if exts is None:
            exts = os.environ.get("PATHEXT", "").split(os.pathsep)
            # If '.exe' is not in exts then obviously this is Win9x and
            # or a bogus PATHEXT, then use a reasonable default.
            for ext in exts:
                if ext.lower() == ".exe":
                    break
            else:
                exts = ['.COM', '.EXE', '.BAT']
        elif not isinstance(exts, list):
            raise TypeError("'exts' argument must be a list or None")
    else:
        if exts is not None:
            raise WhichError("'exts' argument is not supported on platform '%s'" % sys.platform)
        exts = []

    # File name cannot have path separators because PATH lookup does not
    # work that way.
    if os.sep in command or os.altsep and os.altsep in command:
        pass
    else:
        for i, dir_name in enumerate(path):
            # On windows the dir_name *could* be quoted, drop the quotes
            if sys.platform.startswith("win") and len(dir_name) >= 2 and dir_name[0] == '"' and dir_name[-1] == '"':
                dir_name = dir_name[1:-1]
            for ext in [''] + exts:
                abs_name = os.path.abspath(os.path.normpath(os.path.join(dir_name, command + ext)))
                if os.path.isfile(abs_name):
                    if using_given_path:
                        from_where = "from given path element %d" % i
                    elif not sys.platform.startswith("win"):
                        from_where = "from PATH element %d" % i
                    elif i == 0:
                        from_where = "from current directory"
                    else:
                        from_where = "from PATH element %d" % (i - 1)
                    match = _cull((abs_name, from_where), matches, verbose)
                    if match:
                        if verbose:
                            yield match
                        else:
                            yield match[0]
    match = _get_registered_executable(command)
    if match is not None:
        match = _cull(match, matches, verbose)
        if match:
            if verbose:
                yield match
            else:
                yield match[0]
def which(command, path=None, verbose=0, exts=None):
    """
    Return the full path to the first match of the given command on the path.

    "command" is the name of the executable to search for.
    "path" is an optional alternate path list to search. The default is
        to use the PATH environment variable.
    "verbose", if true, will cause a 2-tuple to be returned. The second
        element is a textual description of where the match was found.
    "exts" optionally allows one to specify a list of extensions to use
        instead of the standard list for this system. This can effectively
        be used as an optimization to, for example, avoid stat's of "foo.vbs"
        when searching for "foo" and you know it is not a VisualBasic script
        but ".vbs" is on PATHEXT. This option is only supported on Windows.

    If no match is found for the command, a WhichError is raised.
    """
    matched = whichgen(command, path, verbose, exts)
    try:
        match = next(matched)
    except StopIteration:
        raise WhichError("Could not find '%s' on the path." % command)
    else:
        return match
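Typical call sites look like this (the command name is arbitrary):

try:
    print(which('git'))  # e.g. /usr/bin/git
except WhichError as exc:
    print(exc)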
def step(self, key, chain):
    """
    Perform a ratchet step, replacing one of the internally managed chains with a new one.

    :param key: A bytes-like object encoding the key to initialize the replacement
        chain with.
    :param chain: The chain to replace. This parameter must be one of the two strings
        "sending" and "receiving".
    """
    if chain == "sending":
        self.__previous_sending_chain_length = self.sending_chain_length
        self.__sending_chain = self.__SendingChain(key)

    if chain == "receiving":
        self.__receiving_chain = self.__ReceivingChain(key)
def decryptMessage(self, ciphertext, header, ad=None):
    """
    Decrypt a message using this double ratchet session.

    :param ciphertext: A bytes-like object encoding the message to decrypt.
    :param header: An instance of the Header class. This should have been sent
        together with the ciphertext.
    :param ad: A bytes-like object encoding the associated data to use for message
        authentication. Pass None to use the associated data set during construction.
    :returns: The plaintext.

    :raises AuthenticationFailedException: If checking the authentication for this
        message failed.
    :raises NotInitializedException: If this double ratchet session is not yet
        initialized with a key pair, thus not prepared to decrypt an incoming message.
    :raises TooManySavedMessageKeysException: If more than message_key_store_max
        have to be stored to decrypt this message.
    """
    if ad is None:
        ad = self.__ad

    # Try to decrypt the message using a previously saved message key
    plaintext = self.__decryptSavedMessage(ciphertext, header, ad)
    if plaintext:
        return plaintext

    # Check whether the public key will trigger a dh ratchet step
    if self.triggersStep(header.dh_pub):
        # Save missed message keys for the current receiving chain
        self.__saveMessageKeys(header.pn)

        # Perform the step
        self.step(header.dh_pub)

    # Save missed message keys for the current receiving chain
    self.__saveMessageKeys(header.n)

    # Finally decrypt the message and return the plaintext
    return self.__decrypt(
        ciphertext,
        self.__skr.nextDecryptionKey(),
        header,
        ad
    )
def encryptMessage(self, message, ad=None):
    """
    Encrypt a message using this double ratchet session.

    :param message: A bytes-like object encoding the message to encrypt.
    :param ad: A bytes-like object encoding the associated data to use for message
        authentication. Pass None to use the associated data set during construction.
    :returns: A dictionary containing the message header and ciphertext. The header
        is required to synchronize the double ratchet of the receiving party.
        Send it along with the ciphertext.

        The returned dictionary consists of two keys: "header", which includes an
        instance of the Header class, and "ciphertext", which includes the encrypted
        message encoded as a bytes-like object.

    :raises NotInitializedException: If this double ratchet session is not yet
        initialized with the other party's public key, thus not ready to encrypt
        a message to that party.
    """
    if ad is None:
        ad = self.__ad

    # Prepare the header for this message
    header = Header(
        self.pub,
        self.__skr.sending_chain_length,
        self.__skr.previous_sending_chain_length
    )

    # Encrypt the message
    ciphertext = self.__aead.encrypt(
        message,
        self.__skr.nextEncryptionKey(),
        self._makeAD(header, ad)
    )

    return {
        "header": header,
        "ciphertext": ciphertext
    }
def step(self, other_pub):
    """
    Perform a ratchet step, calculating a new shared secret from the public key
    and deriving new chain keys from this secret.

    New Diffie-Hellman calculations are only performed if the public key is
    different from the previous one.

    :param other_pub: A bytes-like object encoding the public key of the other
        Diffie-Hellman ratchet to synchronize with.
    """
    if self.triggersStep(other_pub):
        self.__wrapOtherPub(other_pub)
        self.__newRootKey("receiving")
        self.__newRatchetKey()
        self.__newRootKey("sending")
def next(self, data):
    """
    Derive a new set of internal and output data from given input data and the
    data stored internally.

    Use the key derivation function to derive new data. The kdf gets supplied with
    the current key and the data passed to this method.

    :param data: A bytes-like object encoding the data to pass to the key
        derivation function.
    :returns: A bytes-like object encoding the output material.
    """
    self.__length += 1
    result = self.__kdf.calculate(self.__key, data, 64)
    self.__key = result[:32]
    return result[32:]
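The 64-byte KDF output is split in half: the first 32 bytes replace the chain key, the rest is handed out. A self-contained sketch of that split, with HMAC-SHA512 standing in for the project's configurable kdf:

import hashlib
import hmac

class Chain:
    """Illustrative symmetric chain; HMAC-SHA512 yields the needed 64 bytes."""

    def __init__(self, key):
        self.__key = key
        self.__length = 0

    def next(self, data):
        self.__length += 1
        result = hmac.new(self.__key, data, hashlib.sha512).digest()
        self.__key = result[:32]   # first half ratchets the chain key forward
        return result[32:]         # second half is the output material

chain = Chain(b'\x00' * 32)
assert chain.next(b'\x01') != chain.next(b'\x01')  # same input, advanced key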
def load_and_dump(create_loader, create_dumper, load_and_dump_):
    """
    :return: a function that has the doc string of :paramref:`load_and_dump_`.
        Additional arguments to this function are passed on to
        :paramref:`load_and_dump_`.

    :param create_loader: a loader, e.g.
        :class:`knittingpattern.Loader.PathLoader`
    :param create_dumper: a dumper, e.g.
        :class:`knittingpattern.Dumper.ContentDumper`
    :param load_and_dump_: a function to call with the loaded content.
        The arguments to both, :paramref:`create_dumper` and
        :paramref:`create_loader`, will be passed to :paramref:`load_and_dump_`.
        Any additional arguments to the return value are also passed to
        :paramref:`load_and_dump_`. The return value of :paramref:`load_and_dump_`
        is passed back to the :paramref:`Dumper`.

    .. seealso:: :func:`decorate_load_and_dump`
    """
    @wraps(load_and_dump_)
    def load_and_dump__(*args1, **kw):
        """Return the loader."""
        def load(*args2):
            """Return the dumper."""
            def dump(*args3):
                """Dump the object."""
                return load_and_dump_(*(args2 + args3 + args1), **kw)
            return create_dumper(dump)
        return create_loader(load)
    return load_and_dump__
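The nesting is easier to see with toy factories; `Box` here is a simplified stand-in for knittingpattern's loader/dumper objects:

class Box:
    """Exposes a single entry point, like the real loader/dumper classes."""
    def __init__(self, func):
        self.string = func

def shout(text):
    """The actual load-and-dump step."""
    return text.upper()

convert = load_and_dump(Box, Box, shout)
print(convert().string('hello').string())  # -> HELLO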
def convert_image_to_knitting_pattern(path, colors=("white", "black")):
    """
    Load an image file such as a PNG bitmap or JPEG file and convert it
    to a :ref:`knitting pattern file <FileFormatSpecification>`.

    :param list colors: a list of strings that should be used as
        :ref:`colors <png-color>`.
    :param str path: ignore this. It is fulfilled by the loader.

    Example:

    .. code:: python

        convert_image_to_knitting_pattern().path("image.png").path("image.json")
    """
    image = PIL.Image.open(path)
    pattern_id = os.path.splitext(os.path.basename(path))[0]
    rows = []
    connections = []
    pattern_set = {
        "version": "0.1",
        "type": "knitting pattern",
        "comment": {"source": path},
        "patterns": [
            {
                "name": pattern_id,
                "id": pattern_id,
                "rows": rows,
                "connections": connections
            }
        ]}
    bbox = image.getbbox()
    if not bbox:
        return pattern_set
    white = image.getpixel((0, 0))
    min_x, min_y, max_x, max_y = bbox
    last_row_y = None
    for y in reversed(range(min_y, max_y)):
        instructions = []
        row = {"id": y, "instructions": instructions}
        rows.append(row)
        for x in range(min_x, max_x):
            if image.getpixel((x, y)) == white:
                color = colors[0]
            else:
                color = colors[1]
            instruction = {"color": color}
            instructions.append(instruction)
        if last_row_y is not None:
            connections.append({"from": {"id": last_row_y}, "to": {"id": y}})
        last_row_y = y
    return pattern_set
def connect_to(self, other_mesh):
    """
    Create a connection to an other mesh.

    .. warning:: Both meshes need to be disconnected and one needs to be a
        consumed and the other a produced mesh. You can check if a connection
        is possible using :meth:`can_connect_to`.

    .. seealso:: :meth:`is_consumed`, :meth:`is_produced`, :meth:`can_connect_to`
    """
    other_mesh.disconnect()
    self.disconnect()
    self._connect_to(other_mesh)

def can_connect_to(self, other):
    """Whether a connection can be established between those two meshes."""
    assert other.is_mesh()
    disconnected = not other.is_connected() and not self.is_connected()
    types_differ = self._is_consumed_mesh() != other._is_consumed_mesh()
    return disconnected and types_differ

def new_knitting_pattern_set_loader(specification=DefaultSpecification()):
    """
    Create a loader for a knitting pattern set.

    :param specification: a :class:`specification
        <knittingpattern.ParsingSpecification.ParsingSpecification>`
        for the knitting pattern set, default :class:`DefaultSpecification`
    """
    parser = specification.new_parser(specification)
    loader = specification.new_loader(parser.knitting_pattern_set)
    return loader
def walk(knitting_pattern):
    """
    Walk the knitting pattern in a right-to-left fashion.

    :return: an iterable to walk the rows
    :rtype: list
    :param knittingpattern.KnittingPattern.KnittingPattern knitting_pattern: a
        knitting pattern to take the rows from
    """
    rows_before = {}  # key consumes from values
    free_rows = []
    walk = []
    for row in knitting_pattern.rows:
        rows_before_ = row.rows_before[:]
        if rows_before_:
            rows_before[row] = rows_before_
        else:
            free_rows.append(row)
    assert free_rows
    while free_rows:
        # print("free rows:", free_rows)
        row = free_rows.pop(0)
        walk.append(row)
        assert row not in rows_before
        for freed_row in reversed(row.rows_after):
            todo = rows_before[freed_row]
            # print("  freed:", freed_row, todo)
            todo.remove(row)
            if not todo:
                del rows_before[freed_row]
                free_rows.insert(0, freed_row)
    assert not rows_before, "everything is walked"
    return walk
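This loop is Kahn's algorithm over the row graph; the same shape on a plain dict makes that explicit (row names are invented):

def topo_walk(after):
    """after maps each node to the nodes that may only come later."""
    before = {}  # node -> set of unfinished predecessors
    for node, successors in after.items():
        before.setdefault(node, set())
        for succ in successors:
            before.setdefault(succ, set()).add(node)
    free = [node for node, preds in before.items() if not preds]
    order = []
    while free:
        node = free.pop(0)
        order.append(node)
        for succ in after.get(node, ()):
            before[succ].discard(node)
            if not before[succ]:
                del before[succ]
                free.insert(0, succ)
    return order

print(topo_walk({'cast_on': ['row1'], 'row1': ['row2'], 'row2': []}))
# -> ['cast_on', 'row1', 'row2']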
def knitting_pattern(self, specification=None):
    """
    Loads a :class:`knitting pattern
    <knittingpattern.KnittingPattern.KnittingPattern>` from the dumped content.

    :param specification: a
        :class:`~knittingpattern.ParsingSpecification.ParsingSpecification`
        or :obj:`None` to use the default specification
    """
    from ..ParsingSpecification import new_knitting_pattern_set_loader
    if specification is None:
        loader = new_knitting_pattern_set_loader()
    else:
        loader = new_knitting_pattern_set_loader(specification)
    return loader.object(self.object())

def string(self):
    """:return: the dump as a string"""
    if self.__text_is_expected:
        return self._string()
    else:
        return self._bytes().decode(self.__encoding)

def _string(self):
    """:return: the string from a :class:`io.StringIO`"""
    file = StringIO()
    self.__dump_to_file(file)
    file.seek(0)
    return file.read()

def bytes(self):
    """:return: the dump as bytes."""
    if self.__text_is_expected:
        return self.string().encode(self.__encoding)
    else:
        return self._bytes()

def _bytes(self):
    """:return: bytes from a :class:`io.BytesIO`"""
    file = BytesIO()
    self.__dump_to_file(file)
    file.seek(0)
    return file.read()

def file(self, file=None):
    """
    Saves the dump in a file-like object in text mode.

    :param file: :obj:`None` or a file-like object.
    :return: a file-like object

    If :paramref:`file` is :obj:`None`, a new :class:`io.StringIO` is returned.
    If :paramref:`file` is not :obj:`None` it should be a file-like object.
    The content is written to the file. After writing, the file's
    read/write position points behind the dumped content.
    """
    if file is None:
        file = StringIO()
    self._file(file)
    return file

def _file(self, file):
    """Dump the content to a `file`."""
    if not self.__text_is_expected:
        file = BytesWrapper(file, self.__encoding)
    self.__dump_to_file(file)

def binary_file(self, file=None):
    """Same as :meth:`file` but for binary content."""
    if file is None:
        file = BytesIO()
    self._binary_file(file)
    return file