Search is not available for this dataset
text
stringlengths 75
104k
|
---|
def scaffold():
    """Interactively start a new site: prompt for a title and a URL."""
    click.echo("A whole new site? Awesome.")
    title = click.prompt("What's the title?")
    url = click.prompt("Great. What's url? http://")
    # TODO: make sure that a site with this title doesn't already exist.
    click.echo("Got it. Creating %s..." % url)
def publish():
    """Publish the site"""
    # Build a production (non-dev) copy from a clean slate, then rsync the
    # build directory to the configured SCP target.
    try:
        build_site(dev_mode=False, clean=True)
        click.echo('Deploying the site...')
        # call("firebase deploy", shell=True)
        call("rsync -avz -e ssh --progress %s/ %s" % (BUILD_DIR, CONFIG["scp_target"],), shell=True)
        # Optionally purge the CloudFlare cache when configured.
        if "cloudflare" in CONFIG and "purge" in CONFIG["cloudflare"] and CONFIG["cloudflare"]["purge"]:
            do_purge()
    except (KeyboardInterrupt, SystemExit):
        raise
        # NOTE(review): unreachable -- `raise` re-raises before this line can
        # run; possibly indentation was lost during extraction. Confirm intent.
        sys.exit(1)
def promote():
    """Schedule all the social media posts."""
    # A Buffer access token is required; explain how to obtain one if absent.
    if "BUFFER_ACCESS_TOKEN" not in os.environ:
        warn("Missing BUFFER_ACCESS_TOKEN.")
        echo("To publish to social medial, you'll need an access token for buffer.")
        echo("The simplest way to get one is to create a new app here: https://buffer.com/developers/apps")
        echo("The token you want is the 'Access Token'")
        echo("Once you have it, make it available to ink by putting it in the environment.")
    # GET https://api.bufferapp.com/1/profiles.json
    echo("Verifying available profiles on buffer")
    profiles = buffer_get("/1/profiles.json")
    # Partition the buffer profiles into the services we know how to post to.
    for p in profiles:
        supported_profile = False
        if p["formatted_service"].lower() == "facebook" or p["formatted_service"].lower() == "facebook page":
            facebook_profiles.append(p)
            supported_profile = True
        elif p["formatted_service"].lower() == "twitter":
            twitter_profiles.append(p)
            supported_profile = True
        if supported_profile:
            click.secho(u"✓ %s: %s" % (p["formatted_service"], p["formatted_username"]), fg="green")
    echo("Checking publication status...")
    # The generated site's private.json is the canonical list of built posts.
    site_json_filename = os.path.join(ROOT_DIR, BUILD_DIR, "static", "private.json")
    with open(site_json_filename, "r") as site_json:
        site = load(site_json)
    echo('Reviewing social posts...')
    posts = {}
    unpublished_posts = []
    # Walk the posts tree, pairing each piece.md with its social/meta YAML.
    for dirpath, dirnames, filenames in os.walk(os.path.join(ROOT_DIR, "posts"), topdown=False):
        for filename in filenames:
            if "piece.md" in filename:
                if exists(dirpath, "social.yml") and exists(dirpath, "meta.yml"):
                    with open(os.path.join(dirpath, "social.yml")) as f:
                        social = load(f)
                    with open(os.path.join(dirpath, "meta.yml")) as f:
                        meta = load(f)
                    if "url" in meta:
                        # Find the matching entry in the generated site JSON.
                        site_json_entry = None
                        for sp in site["posts"]:
                            if meta["url"] == sp["url"]:
                                site_json_entry = sp
                                break
                        posts[meta["url"]] = {
                            "meta": meta,
                            "social": social,
                            "dirpath": dirpath,
                            "site": site_json_entry,
                        }
                        if "published" not in social or social["published"] is not True:
                            unpublished_posts.append(meta["url"])
                    else:
                        # NOTE(review): str.replace() requires two arguments;
                        # this raises TypeError if ever reached. Probably
                        # meant dirpath.replace(ROOT_DIR, "").
                        warn("No url found for %s" % dirpath.replace(ROOT_DIR))
    # automark: once the user opts in, silently mark past-only posts published.
    automark_set = False
    automark = None
    for u in unpublished_posts:
        post = posts[u]
        if "posts" in post["social"] and post["social"]["posts"] and len(post["social"]["posts"]) > 0:
            facebook_posts = []
            twitter_posts = []
            mark_as_published = False
            has_valid_post = False
            for p in post["social"]["posts"]:
                try:
                    # Each entry must be a single-key mapping: service -> post.
                    if len(p.keys()) != 1:
                        error("Something's formatted wrong in %s's social.yml" % u)
                        break
                    # NOTE(review): p.keys()[0] is Python 2 only -- dict views
                    # are not indexable on Python 3.
                    if p.keys()[0] == "facebook":
                        facebook_posts.append(p["facebook"])
                        if post_in_future(p["facebook"], post):
                            has_valid_post = True
                    elif p.keys()[0] == "twitter":
                        if post_in_future(p["twitter"], post):
                            has_valid_post = True
                        twitter_posts.append(p["twitter"])
                    else:
                        warn("Unknown post type: %s. Skipping." % p.keys()[0])
                except:
                    # NOTE(review): bare except also swallows KeyboardInterrupt.
                    error("Error parsing social.yml for \"%s\"" % post["meta"]["title"])
                    import traceback
                    traceback.print_exc()
            if not has_valid_post:
                # Everything scheduled for this post is already in the past.
                if automark:
                    mark_as_published = True
                else:
                    warn('"%s" hasn\'t been published, but all posts are in the past.' % post["meta"]["title"])
                    if click.confirm("Mark as published?"):
                        mark_as_published = True
                    if not automark_set:
                        if click.confirm("Mark all other similar posts as published?"):
                            automark = True
                        automark_set = True
            else:
                echo('\n"%s" hasn\'t been published to social media.' % post["meta"]["title"])
                # Preview pending Facebook posts, skipping those in the past.
                if len(facebook_posts) > 0:
                    echo(" Facebook:")
                    for p in facebook_posts:
                        if (len(p["content"]) > 40):
                            truncated_content = "%s..." % p["content"][:40]
                        else:
                            truncated_content = p["content"]
                        if post_in_future(p, post):
                            echo(" - %s: \"%s\"" % (
                                publish_datetime(p, post).strftime("%c"),
                                truncated_content,
                            ))
                        else:
                            warn(" - %s: \"%s\" skipping (past)" % (
                                publish_datetime(p, post).strftime("%c"),
                                truncated_content,
                            ))
                # NOTE(review): this header prints even with no twitter posts
                # (it sits outside the length check, unlike Facebook's).
                echo(" Twitter:")
                if len(twitter_posts) > 0:
                    for p in twitter_posts:
                        if (len(p["content"]) > 40):
                            truncated_content = "%s..." % p["content"][:40]
                        else:
                            truncated_content = p["content"]
                        if post_in_future(p, post):
                            echo(" - %s: \"%s\"" % (
                                publish_datetime(p, post).strftime("%c"),
                                truncated_content,
                            ))
                        else:
                            warn(" - %s: \"%s\" skipping (past)" % (
                                publish_datetime(p, post).strftime("%c"),
                                truncated_content,
                            ))
                if click.confirm(click.style(" Publish now?", fg="green")):
                    mark_as_published = True
                    echo(" Publishing...")
                    # Only future-dated posts are actually sent.
                    for p in facebook_posts:
                        if post_in_future(p, post):
                            publish_facebook(p, post)
                            if (len(p["content"]) > 40):
                                truncated_content = "%s..." % p["content"][:40]
                            else:
                                truncated_content = p["content"]
                            click.secho(u" ✓ Facebook %s: \"%s\"" % (
                                publish_datetime(p, post).strftime("%c"),
                                truncated_content,
                            ), fg="green")
                    for p in twitter_posts:
                        if post_in_future(p, post):
                            publish_twitter(p, post)
                            if (len(p["content"]) > 40):
                                truncated_content = "%s..." % p["content"][:40]
                            else:
                                truncated_content = p["content"]
                            click.secho(u" ✓ Twitter %s: \"%s\"" % (
                                publish_datetime(p, post).strftime("%c"),
                                truncated_content,
                            ), fg="green")
                    echo(" Published.")
            # Save as published.
            if mark_as_published or automark:
                post["social"]["published"] = True
                with open(os.path.join(post["dirpath"], "social.yml"), "w") as f:
                    dump(post["social"], f, default_flow_style=False, width=1000)
    if click.confirm("Publish your entire backlog to buffer?"):
        print ("dope")
def get_branches(self):
    """Return the sanitised names of all branches known to git."""
    raw = self._git.branch(color="never")
    branches = []
    for line in raw.splitlines():
        branches.append(self._sanitize(line))
    return branches
def get_current_branch(self):
    """Return the sanitised name of the active branch, or None if absent."""
    for line in self._git.branch(color="never").splitlines():
        # git marks the checked-out branch with a leading asterisk.
        if line.startswith('*'):
            return self._sanitize(line)
    return None
def create_patch(self, from_tag, to_tag):
    """Return the textual diff between two tags."""
    tag_range = '{}..{}'.format(from_tag, to_tag)
    diff = self._git.diff(tag_range, _tty_out=False)
    return str(diff)
def one(func, n=0):
    """
    Create a callable that applies ``func`` to a single value in a sequence.
    If the value is not a sequence, or is too short, ``None`` is returned.

    :type func: `callable`
    :param func: Callable to be applied to the selected value.
    :type n: `int`
    :param n: Index of the value to apply ``func`` to.
    """
    def _pick(result):
        if not _isSequenceTypeNotText(result):
            return None
        if len(result) <= n:
            return None
        return func(result[n])
    return maybe(_pick)
def many(func):
    """
    Create a callable that applies ``func`` to every value in a sequence.
    If the value is not a sequence then an empty list is returned.

    :type func: `callable`
    :param func: Callable to be applied to each result.
    """
    def _each(result):
        if not _isSequenceTypeNotText(result):
            return []
        return map(func, result)
    return maybe(_each, default=[])
def Text(value, encoding=None):
    """
    Parse a value as text.

    :type value: `unicode` or `bytes`
    :param value: Text value to parse
    :type encoding: `bytes`
    :param encoding: Encoding to treat ``bytes`` values as, defaults to
        ``utf-8``.
    :rtype: `unicode`
    :return: Parsed text or ``None`` if ``value`` is neither `bytes` nor
        `unicode`.
    """
    codec = 'utf-8' if encoding is None else encoding
    if isinstance(value, bytes):
        return value.decode(codec)
    if isinstance(value, unicode):
        return value
    return None
def Integer(value, base=10, encoding=None):
    """
    Parse a value as an integer.

    :type value: `unicode` or `bytes`
    :param value: Text value to parse
    :type base: `unicode` or `bytes`
    :param base: Base to assume ``value`` is specified in.
    :type encoding: `bytes`
    :param encoding: Encoding to treat ``bytes`` values as, defaults to
        ``utf-8``.
    :rtype: `int`
    :return: Parsed integer or ``None`` if ``value`` could not be parsed as
        an integer.
    """
    # The Text() call stays inside the try: a bad value/encoding combination
    # raising TypeError is treated the same as an unparseable integer.
    try:
        return int(Text(value, encoding), base)
    except (TypeError, ValueError):
        return None
def Boolean(value, true=(u'yes', u'1', u'true'), false=(u'no', u'0', u'false'),
            encoding=None):
    """
    Parse a value as a boolean.

    :type value: `unicode` or `bytes`
    :param value: Text value to parse.
    :type true: `tuple` of `unicode`
    :param true: Values to compare, ignoring case, for ``True`` values.
    :type false: `tuple` of `unicode`
    :param false: Values to compare, ignoring case, for ``False`` values.
    :type encoding: `bytes`
    :param encoding: Encoding to treat `bytes` values as, defaults to
        ``utf-8``.
    :rtype: `bool`
    :return: Parsed boolean or ``None`` if ``value`` did not match ``true``
        or ``false`` values.
    """
    text = Text(value, encoding)
    if text is None:
        return None
    normalized = text.lower().strip()
    if normalized in true:
        return True
    if normalized in false:
        return False
    return None
def Delimited(value, parser=Text, delimiter=u',', encoding=None):
    """
    Parse a value as a delimited list.

    :type value: `unicode` or `bytes`
    :param value: Text value to parse.
    :type parser: `callable` taking a `unicode` parameter
    :param parser: Callable to map over the delimited text values.
    :type delimiter: `unicode`
    :param delimiter: Delimiter text.
    :type encoding: `bytes`
    :param encoding: Encoding to treat `bytes` values as, defaults to
        ``utf-8``.
    :rtype: `list`
    :return: List of parsed values.
    """
    text = Text(value, encoding)
    # None and the empty string both yield an empty result.
    if not text:
        return []
    return map(parser, text.split(delimiter))
def Timestamp(value, _divisor=1., tz=UTC, encoding=None):
    """
    Parse a value as a POSIX timestamp in seconds.

    :type value: `unicode` or `bytes`
    :param value: Text value to parse, which should be the number of seconds
        since the epoch.
    :type _divisor: `float`
    :param _divisor: Number to divide the value by.
    :type tz: `tzinfo`
    :param tz: Timezone, defaults to UTC.
    :type encoding: `bytes`
    :param encoding: Encoding to treat `bytes` values as, defaults to
        ``utf-8``.
    :rtype: `datetime.datetime`
    :return: Parsed datetime or ``None`` if ``value`` could not be parsed.
    """
    seconds = Float(value, encoding)
    if seconds is None:
        return None
    return datetime.fromtimestamp(seconds / _divisor, tz)
def parse(expected, query):
    """
    Parse query parameters.

    :type expected: `dict` mapping `bytes` to `callable`
    :param expected: Mapping of query argument names to argument parsing
        callables.
    :type query: `dict` mapping `bytes` to `list` of `bytes`
    :param query: Mapping of query argument names to lists of argument
        values, this is the form that Twisted Web's `IRequest.args
        <twisted:twisted.web.iweb.IRequest.args>` value takes.
    :rtype: `dict` mapping `bytes` to `object`
    :return: Mapping of query argument names to parsed argument values.
    """
    # Missing arguments are parsed from an empty list of values.
    return {key: parser(query.get(key, []))
            for key, parser in expected.items()}
def put(self, metrics):
    """
    Put metrics to CloudWatch.

    :param metrics: a single CloudWatchMetric mapping, or a list of them;
        each mapping is expanded as keyword arguments to
        ``put_metric_data``.
    """
    # isinstance() is the idiomatic type check (was `type(metrics) == list`).
    if isinstance(metrics, list):
        for metric in metrics:
            self.c.put_metric_data(**metric)
    else:
        self.c.put_metric_data(**metrics)
def _renderResource(resource, request):
    """
    Render a given resource via its ``render_METHOD`` method.

    See `IResource.render <twisted:twisted.web.resource.IResource.render>`.
    """
    handler = getattr(resource, 'render_' + nativeString(request.method), None)
    if handler is not None:
        return handler(request)
    # No handler for this HTTP method: report which methods are allowed.
    try:
        allowed = resource.allowedMethods
    except AttributeError:
        allowed = _computeAllowedMethods(resource)
    raise UnsupportedMethod(allowed)
def _adaptToResource(self, result):
    """
    Adapt a result to `IResource`.

    Several adaptions are tried, in order: ``None``, `ISpinneretResource`,
    `IRenderable <twisted:twisted.web.iweb.IRenderable>`, `IResource
    <twisted:twisted.web.resource.IResource>`, and `URLPath
    <twisted:twisted.python.urlpath.URLPath>` (treated as a redirect).
    Anything else is returned as is.
    """
    if result is None:
        return NotFound()
    adapted = ISpinneretResource(result, None)
    if adapted is not None:
        return SpinneretResource(adapted)
    adapted = IRenderable(result, None)
    if adapted is not None:
        return _RenderableResource(adapted)
    adapted = IResource(result, None)
    if adapted is not None:
        return adapted
    if isinstance(result, URLPath):
        return Redirect(str(result))
    return result
def _handleRenderResult(self, request, result):
    """
    Handle the result from `IResource.render`.

    If the result is a `Deferred` then return `NOT_DONE_YET` and add
    a callback to write the result to the request when it arrives.
    """
    def _requestFinished(result, cancel):
        # Cancel the pending Deferred when the client disconnects first.
        cancel()
        return result
    # Normalize: always work with a Deferred.
    if not isinstance(result, Deferred):
        result = succeed(result)
    def _whenDone(result):
        # Adapted results may themselves be renderable; plain values pass
        # through via the identity lambda.
        render = getattr(result, 'render', lambda request: result)
        renderResult = render(request)
        if renderResult != NOT_DONE_YET:
            request.write(renderResult)
            request.finish()
        return result
    request.notifyFinish().addBoth(_requestFinished, result.cancel)
    result.addCallback(self._adaptToResource)
    result.addCallback(_whenDone)
    result.addErrback(request.processingFailed)
    return NOT_DONE_YET
def _negotiateHandler(self, request):
    """
    Negotiate a handler based on the content types acceptable to the
    client.

    :rtype: 2-`tuple` of `twisted.web.iweb.IResource` and `bytes`
    :return: Pair of a resource and the content type.
    """
    accept = _parseAccept(request.requestHeaders.getRawHeaders('Accept'))
    # Accept values are already sorted by quality; first match wins.
    for media_type in accept.keys():
        match = self._acceptHandlers.get(media_type.lower())
        if match is not None:
            return match, match.contentType
    if self._fallback:
        fallback = self._handlers[0]
        return fallback, fallback.contentType
    return NotAcceptable(), None
def _parseAccept(headers):
    """
    Parse and sort an ``Accept`` header.

    The header is sorted according to the ``q`` parameter for each value.

    @rtype: `OrderedDict` mapping `bytes` to `dict`
    @return: Mapping of media types to header parameters.
    """
    def _quality(item):
        # Missing q defaults to 1 per the HTTP spec.
        return float(item[1].get('q', 1))
    ordered = sorted(_splitHeaders(headers), key=_quality, reverse=True)
    return OrderedDict(ordered)
def _splitHeaders(headers):
"""
Split an HTTP header whose components are separated with commas.
Each component is then split on semicolons and the component arguments
converted into a `dict`.
@return: `list` of 2-`tuple` of `bytes`, `dict`
@return: List of header arguments and mapping of component argument names
to values.
"""
return [cgi.parse_header(value)
for value in chain.from_iterable(
s.split(',') for s in headers
if s)] |
def contentEncoding(requestHeaders, encoding=None):
    """
    Extract an encoding from a ``Content-Type`` header.

    @type requestHeaders: `twisted.web.http_headers.Headers`
    @param requestHeaders: Request headers.
    @type encoding: `bytes`
    @param encoding: Default encoding to assume if the ``Content-Type``
        header is lacking one. Defaults to ``UTF-8``.
    @rtype: `bytes`
    @return: Content encoding.
    """
    default = b'utf-8' if encoding is None else encoding
    parsed = _splitHeaders(
        requestHeaders.getRawHeaders(b'Content-Type', []))
    if not parsed:
        return default
    # Use the charset parameter of the first Content-Type value.
    return parsed[0][1].get(b'charset', default)
def maybe(f, default=None):
    """
    Create a nil-safe callable decorator.

    If the wrapped callable receives ``None`` as its first argument, it
    returns ``default`` immediately without invoking ``f``.
    """
    @wraps(f)
    def _guard(x, *args, **kwargs):
        return default if x is None else f(x, *args, **kwargs)
    return _guard
def settings(path=None, with_path=None):
    """
    Get or set `Settings._wrapped`.

    :param str path: a python module file; when given, its config is bound
        to `Settings._wrapped` first.
    :param str with_path: search path
    :return: An instance of `Settings`
    """
    if path:
        Settings.bind(path, with_path=with_path)
    return Settings._wrapped
def bind(mod_path, with_path=None):
    """
    Bind the public names of module ``mod_path`` onto `Settings._wrapped`.

    .. note::
        You don't need to call this method yourself; the program calls it
        in `cliez.parser.parse`.

    Raises `ImportError` when ``mod_path`` is not importable.

    :param str mod_path: module path, *use dot style, 'mod.mod1'*
    :param str with_path: add path to `sys.path`; if path is a file, its
        parent directory is used.
    :return: An instance of `Settings`
    """
    if with_path:
        if os.path.isdir(with_path):
            sys.path.insert(0, with_path)
        else:
            # A file path: strip the last two components to get the root.
            sys.path.insert(0, with_path.rsplit('/', 2)[0])
    # raises ImportError when mod_path does not exist
    module = importlib.import_module(mod_path)
    bound = Settings()
    for name in dir(module):
        attr = getattr(module, name)
        # Skip private names and imported sub-modules.
        if name.startswith('_') or type(attr).__name__ == 'module':
            continue
        setattr(bound, name, attr)
    Settings._path = mod_path
    Settings._wrapped = bound
    return bound
def get_version():
    """
    Get the version from the version module without importing more than
    necessary.

    :return: the ``__version__`` string declared in
        ``txspinneret/_version.py``.
    """
    version_module_path = os.path.join(
        os.path.dirname(__file__), "txspinneret", "_version.py")
    # The version module contains a variable called __version__.
    # Execute it in an explicit namespace: relying on exec() mutating
    # locals() inside a function is fragile on Python 3.
    namespace = {}
    with open(version_module_path) as version_module:
        exec(version_module.read(), namespace)
    return namespace["__version__"]
def send(self, use_open_peers=True, queue=True, **kw):
    """
    send a transaction immediately. Failed transactions are picked up by the TxBroadcaster

    :param ip: specific peer IP to send tx to
    :param port: port of specific peer
    :param use_open_peers: use Arky's broadcast method
    """
    if not use_open_peers:
        # Target a single peer given by the ip/port keyword arguments.
        ip = kw.get('ip')
        port = kw.get('port')
        peer = 'http://{}:{}'.format(ip, port)
        res = arky.rest.POST.peer.transactions(peer=peer, transactions=[self.tx.tx])
    else:
        # Let arky broadcast to its set of open peers.
        res = arky.core.sendPayload(self.tx.tx)
    # NOTE(review): `self.tx.success` is read here *before* being reassigned
    # below -- presumably it holds a percentage string from the broadcast
    # result of a previous attempt; confirm against the Tx class.
    if self.tx.success != '0.0%':
        self.tx.error = None
        self.tx.success = True
    else:
        self.tx.error = res['messages']
        self.tx.success = False
    self.tx.tries += 1
    self.tx.res = res
    if queue:
        # Flag the tx as sent so the TxBroadcaster queue can track it.
        # NOTE(review): indentation was lost in extraction; __save() is
        # assumed to run only on the queue path -- confirm.
        self.tx.send = True
        self.__save()
    return res
def check_confirmations_or_resend(self, use_open_peers=False, **kw):
    """
    Resend the transaction when it has no confirmations yet.

    :param use_open_peers: select random peers from the api/peers endpoint
    """
    unconfirmed = self.confirmations() == 0
    if unconfirmed:
        self.send(use_open_peers, **kw)
def command_list():
    """
    Get the sub-command list.

    .. note::
        Don't use a logger to handle errors in this function: a failure
        here is a code error, not a runtime error.

    :return: `list` of matched sub-parser names
    """
    from cliez.conf import COMPONENT_ROOT
    root = COMPONENT_ROOT
    if root is None:
        sys.stderr.write("cliez.conf.COMPONENT_ROOT not set.\n")
        sys.exit(2)
    if not os.path.exists(root):
        sys.stderr.write(
            "please set a valid path for `cliez.conf.COMPONENT_ROOT`\n")
        sys.exit(2)
    try:
        entries = os.listdir(os.path.join(root, 'components'))
    except FileNotFoundError:
        return []
    # Every .py file except the package marker is a sub-command.
    return [entry[:-3] for entry in entries
            if entry.endswith('.py') and entry != '__init__.py']
def append_arguments(klass, sub_parsers, default_epilog, general_arguments):
    """
    Add class options to argparser options.

    :param cliez.component.Component klass: subclass of Component
    :param Namespace sub_parsers:
    :param str default_epilog: default_epilog
    :param list general_arguments: global options, defined by user
    :return: Namespace subparser
    """
    entry_name = hump_to_underscore(klass.__name__).replace('_component', '')
    # Sub-command documentation: fall back to the cliez epilog.
    if default_epilog:
        epilog = default_epilog
    else:
        epilog = ('This tool generate by `cliez` '
                  'https://www.github.com/wangwenpei/cliez')
    sub_parser = sub_parsers.add_parser(entry_name, help=klass.__doc__,
                                        epilog=epilog)
    sub_parser.description = klass.add_arguments.__doc__
    # Slot arguments, when present, also take over the description.
    if hasattr(klass, 'add_slot_args'):
        for spec in (klass.add_slot_args() or []):
            sub_parser.add_argument(*spec[0], **spec[1])
        sub_parser.description = klass.add_slot_args.__doc__
    for spec in (klass.add_arguments() or []):
        sub_parser.add_argument(*spec[0], **spec[1])
    if not klass.exclude_global_option:
        for spec in general_arguments:
            sub_parser.add_argument(*spec[0], **spec[1])
    return sub_parser
def parse(parser, argv=None, settings_key='settings', no_args_func=None):
    """
    parser cliez app

    :param argparse.ArgumentParser parser: an instance
        of argparse.ArgumentParser
    :param argv: argument list,default is `sys.argv`
    :type argv: list or tuple
    :param str settings_key: settings option name, default is settings.
    :param object no_args_func: a callable object.if no sub-parser matched,
        parser will call it.
    :return: an instance of `cliez.component.Component` or its subclass
    """
    argv = argv or sys.argv
    commands = command_list()
    if type(argv) not in [list, tuple]:
        raise TypeError("argv only can be list or tuple")
    # match sub-parser
    if len(argv) >= 2 and argv[1] in commands:
        sub_parsers = parser.add_subparsers()
        # Component classes follow the '<Name>Component' naming convention.
        class_name = argv[1].capitalize() + 'Component'
        from cliez.conf import (COMPONENT_ROOT,
                                LOGGING_CONFIG,
                                EPILOG,
                                GENERAL_ARGUMENTS)
        sys.path.insert(0, os.path.dirname(COMPONENT_ROOT))
        mod = importlib.import_module(
            '{}.components.{}'.format(os.path.basename(COMPONENT_ROOT),
                                      argv[1]))
        # dynamic load component
        klass = getattr(mod, class_name)
        sub_parser = append_arguments(klass, sub_parsers, EPILOG,
                                      GENERAL_ARGUMENTS)
        options = parser.parse_args(argv[1:])
        # Bind user settings when the settings option is present.
        settings = Settings.bind(
            getattr(options, settings_key)
        ) if settings_key and hasattr(options, settings_key) else None
        obj = klass(parser, sub_parser, options, settings)
        # init logger: verbosity raises the log level step by step.
        logger_level = logging.CRITICAL
        if hasattr(options, 'verbose'):
            if options.verbose == 1:
                logger_level = logging.ERROR
            elif options.verbose == 2:
                logger_level = logging.WARNING
            elif options.verbose == 3:
                logger_level = logging.INFO
                obj.logger.setLevel(logging.INFO)
            pass
        if hasattr(options, 'debug') and options.debug:
            logger_level = logging.DEBUG
            # http lib use a strange way to logging
            try:
                import http.client as http_client
                http_client.HTTPConnection.debuglevel = 1
            except Exception:
                # do nothing
                pass
            pass
        loggers = LOGGING_CONFIG['loggers']
        for k, v in loggers.items():
            v.setdefault('level', logger_level)
            if logger_level in [logging.INFO, logging.DEBUG]:
                v['handlers'] = ['stdout']
            pass
        logging_config.dictConfig(LOGGING_CONFIG)
        # this may not necessary
        # obj.logger.setLevel(logger_level)
        obj.run(options)
        # return object to make unit test easy
        return obj
    # print all sub commands when user set.
    if not parser.description and len(commands):
        sub_parsers = parser.add_subparsers()
        [sub_parsers.add_parser(v) for v in commands]
        pass
    pass
    options = parser.parse_args(argv[1:])
    if no_args_func and callable(no_args_func):
        return no_args_func(options)
    else:
        parser._print_message("nothing to do...\n")
    pass
def include_file(filename, global_vars=None, local_vars=None):
    """
    .. deprecated 2.1::
        Don't use this any more. It's not pythonic.

    Include a file the way PHP's ``include`` does; useful when a large
    config file needs to be split.
    """
    # Default to the caller's globals/locals, as a PHP include would see.
    caller = sys._getframe(1)
    if global_vars is None:
        global_vars = caller.f_globals
    if local_vars is None:
        local_vars = caller.f_locals
    with open(filename, 'r') as handle:
        code = compile(handle.read(), os.path.basename(filename), 'exec')
    exec(code, global_vars, local_vars)
def hump_to_underscore(name):
    """
    Convert Hump (CamelCase) style to underscore (snake_case).

    :param name: Hump-style identifier
    :return: str
    """
    # Build the parts and join once: repeated `+=` on a string is quadratic.
    parts = []
    for index, char in enumerate(name):
        if index == 0:
            # The first character is lowered without a separator.
            parts.append(char.lower())
        elif 'A' <= char <= 'Z':
            # Only ASCII uppercase triggers a separator (ord 65-90, as in
            # the original).
            parts.append('_' + char.lower())
        else:
            parts.append(char)
    return ''.join(parts)
def get_fuel_prices(self) -> GetFuelPricesResponse:
    """Fetch current fuel prices for every station."""
    response = requests.get(
        '{}/prices'.format(API_URL_BASE),
        headers=self._get_headers(),
        timeout=self._timeout,
    )
    if response.ok:
        return GetFuelPricesResponse.deserialize(response.json())
    raise FuelCheckError.create(response)
def get_fuel_prices_for_station(
        self,
        station: int
) -> List[Price]:
    """Gets the fuel prices for a specific fuel station.

    :param station: Station code to query.
    :raises FuelCheckError: When the API responds with a non-2xx status.
    """
    response = requests.get(
        '{}/prices/station/{}'.format(API_URL_BASE, station),
        headers=self._get_headers(),
        timeout=self._timeout,
    )
    if not response.ok:
        raise FuelCheckError.create(response)
    data = response.json()
    # Renamed the loop variable: it previously shadowed `data` itself.
    return [Price.deserialize(price) for price in data['prices']]
def get_fuel_prices_within_radius(
        self, latitude: float, longitude: float, radius: int,
        fuel_type: str, brands: Optional[List[str]] = None
) -> List[StationPrice]:
    """Gets all the fuel prices within the specified radius."""
    payload = {
        'fueltype': fuel_type,
        'latitude': latitude,
        'longitude': longitude,
        'radius': radius,
        'brand': [] if brands is None else brands,
    }
    response = requests.post(
        '{}/prices/nearby'.format(API_URL_BASE),
        json=payload,
        headers=self._get_headers(),
        timeout=self._timeout,
    )
    if not response.ok:
        raise FuelCheckError.create(response)
    data = response.json()
    # Index the stations by code so each price can find its station.
    stations_by_code = {
        entry['code']: Station.deserialize(entry)
        for entry in data['stations']
    }
    prices = []  # type: List[StationPrice]
    for raw_price in data['prices']:
        parsed = Price.deserialize(raw_price)
        prices.append(StationPrice(
            price=parsed,
            station=stations_by_code[parsed.station_code]
        ))
    return prices
def get_fuel_price_trends(self, latitude: float, longitude: float,
                          fuel_types: List[str]) -> PriceTrends:
    """Gets the fuel price trends for the given location and fuel types.

    :param latitude: Latitude of the location.
    :param longitude: Longitude of the location.
    :param fuel_types: Fuel type codes to include in the trends.
    :raises FuelCheckError: When the API responds with a non-2xx status.
    """
    response = requests.post(
        '{}/prices/trends/'.format(API_URL_BASE),
        json={
            'location': {
                'latitude': latitude,
                'longitude': longitude,
            },
            # Loop variable renamed: `type` shadowed the builtin.
            'fueltypes': [{'code': code} for code in fuel_types],
        },
        headers=self._get_headers(),
        timeout=self._timeout,
    )
    if not response.ok:
        raise FuelCheckError.create(response)
    data = response.json()
    return PriceTrends(
        variances=[
            Variance.deserialize(variance)
            for variance in data['Variances']
        ],
        average_prices=[
            AveragePrice.deserialize(avg_price)
            for avg_price in data['AveragePrices']
        ]
    )
def get_reference_data(
        self,
        modified_since: Optional[datetime.datetime] = None
) -> GetReferenceDataResponse:
    """
    Fetches API reference data.

    :param modified_since: The response will be empty if no
        changes have been made to the reference data since this
        timestamp, otherwise all reference data will be returned.
    """
    # Default to a timestamp old enough to always return everything.
    since = modified_since or datetime.datetime(year=2010, month=1, day=1)
    request_headers = {'if-modified-since': self._format_dt(since)}
    request_headers.update(self._get_headers())
    response = requests.get(
        '{}/lovs'.format(API_URL_BASE),
        headers=request_headers,
        timeout=self._timeout,
    )
    if not response.ok:
        raise FuelCheckError.create(response)
    return GetReferenceDataResponse.deserialize(response.json())
def parse_require(self, env, keys, defaults=None):
    """
    Check and fetch required config values.

    :param dict env: user config node
    :param list keys: keys to check; the option name and the key name
        must be identical.
    :param dict defaults: default values merged in for missing keys
    :return: dict. ``env`` with all keys verified.

    .. note::
        ``self.error`` is invoked when a required key is missing.
    """
    # Use None as the default: a mutable {} default is shared across calls.
    if defaults is None:
        defaults = {}
    for key in keys:
        # Command-line options take precedence over the env file.
        env[key] = getattr(self.options, key) or env.get(key, None)
        if env[key] is None:
            self.error("config syntax error,"
                       "please set `%s` in your env: %s" % (key, env))
    for key, value in defaults.items():
        env.setdefault(key, value)
    return env
def pre(self, command, output_dir, vars):
    """
    Called before the template is applied; seeds standard template vars.
    """
    current_year = time.strftime('%Y', time.localtime())
    vars['license_name'] = 'Apache'
    vars['year'] = current_year
def Text(name, encoding=None):
    """
    Match a route parameter.

    `Any` is a synonym for `Text`.

    :type name: `bytes`
    :param name: Route parameter name.
    :type encoding: `bytes`
    :param encoding: Default encoding to assume if the ``Content-Type``
        header is lacking one.
    :return: ``callable`` suitable for use with `route` or `subroute`.
    """
    def _match(request, value):
        charset = contentEncoding(request.requestHeaders, encoding)
        return name, query.Text(value, encoding=charset)
    return _match
def Integer(name, base=10, encoding=None):
    """
    Match an integer route parameter.

    :type name: `bytes`
    :param name: Route parameter name.
    :type base: `int`
    :param base: Base to interpret the value in.
    :type encoding: `bytes`
    :param encoding: Default encoding to assume if the ``Content-Type``
        header is lacking one.
    :return: ``callable`` suitable for use with `route` or `subroute`.
    """
    def _match(request, value):
        charset = contentEncoding(request.requestHeaders, encoding)
        return name, query.Integer(value, base=base, encoding=charset)
    return _match
def _matchRoute(components, request, segments, partialMatching):
    """
    Match a request path against our path components.

    The path components are always matched relative to their parent is in the
    resource hierarchy, in other words it is only possible to match URIs nested
    more deeply than the parent resource.

    :type components: ``iterable`` of `bytes` or `callable`
    :param components: Iterable of path components, to match against the
        request, either static strings or dynamic parameters. As a
        convenience, a single `bytes` component containing ``/`` may be given
        instead of manually separating the components. If no components are
        given the null route is matched, this is the case where ``segments``
        is empty.
    :type segments: ``sequence`` of `bytes`
    :param segments: Sequence of path segments, from the request, to match
        against.
    :type partialMatching: `bool`
    :param partialMatching: Allow partial matching against the request path?
    :rtype: 2-`tuple` of `dict` keyed on `bytes` and `list` of `bytes`
    :return: Pair of parameter results, mapping parameter names to processed
        values, and a list of the remaining request path segments. If there
        is no route match the result will be ``None`` and the original
        request path segments.
    """
    # Convenience: a single bytes component may contain '/' separators.
    if len(components) == 1 and isinstance(components[0], bytes):
        components = components[0]
        if components[:1] == '/':
            components = components[1:]
        components = components.split('/')
    results = OrderedDict()
    NO_MATCH = None, segments
    remaining = list(segments)
    # Handle the null route.
    if len(segments) == len(components) == 0:
        return results, remaining
    # izip_longest pads the shorter side with None, letting us detect when
    # either the components or the segments run out first.
    for us, them in izip_longest(components, segments):
        if us is None:
            if partialMatching:
                # We've matched all of our components, there might be more
                # segments for something else to process.
                break
            else:
                return NO_MATCH
        elif them is None:
            # We've run out of path segments to match, so this route can't be
            # the matching one.
            return NO_MATCH
        if callable(us):
            # Dynamic parameter: the matcher returns (name, parsed value).
            name, match = us(request, them)
            if match is None:
                return NO_MATCH
            results[name] = match
        elif us != them:
            return NO_MATCH
        remaining.pop(0)
    return results, remaining
def routedResource(f, routerAttribute='router'):
    """
    Decorate a router-producing callable to instead produce a resource.

    This simply produces a new callable that invokes the original callable,
    and calls ``resource`` on the ``routerAttribute``.

    If the router producer has multiple routers the attribute can be altered
    to choose the appropriate one, for example:

    .. code-block:: python

        class _ComplexRouter(object):
            router = Router()
            privateRouter = Router()

            @router.route('/')
            def publicRoot(self, request, params):
                return SomethingPublic(...)

            @privateRouter.route('/')
            def privateRoot(self, request, params):
                return SomethingPrivate(...)

        PublicResource = routedResource(_ComplexRouter)
        PrivateResource = routedResource(_ComplexRouter, 'privateRouter')

    :type f: ``callable``
    :param f: Callable producing an object with a `Router` attribute, for
        example, a type.
    :type routerAttribute: `str`
    :param routerAttribute: Name of the `Router` attribute on the result of
        calling ``f``.
    :rtype: `callable`
    :return: Callable producing an `IResource`.
    """
    @wraps(f)
    def _resourceFactory(*args, **kwargs):
        router = getattr(f(*args, **kwargs), routerAttribute)
        return router.resource()
    return _resourceFactory
def _forObject(self, obj):
"""
Create a new `Router` instance, with it's own set of routes, for
``obj``.
"""
router = type(self)()
router._routes = list(self._routes)
router._self = obj
return router |
def _addRoute(self, f, matcher):
"""
Add a route handler and matcher to the collection of possible routes.
"""
self._routes.append((f.func_name, f, matcher)) |
def route(self, *components):
    """
    See `txspinneret.route.route`.

    This decorator can be stacked with itself to specify multiple routes
    with a single handler.
    """
    def _decorator(f):
        # `route` here refers to the module-level matcher factory.
        self._addRoute(f, route(*components))
        return f
    return _decorator
def subroute(self, *components):
    """
    See `txspinneret.route.subroute`.

    This decorator can be stacked with itself to specify multiple routes
    with a single handler.
    """
    def _decorator(f):
        # `subroute` here refers to the module-level matcher factory.
        self._addRoute(f, subroute(*components))
        return f
    return _decorator
def _tempfile(filename):
"""
Create a NamedTemporaryFile instance to be passed to atomic_writer
"""
return tempfile.NamedTemporaryFile(mode='w',
dir=os.path.dirname(filename),
prefix=os.path.basename(filename),
suffix=os.fsencode('.tmp'),
delete=False) |
def atomic_write(filename):
    """
    Generator body for a context manager: open a temp file alongside
    ``filename``, hand it to the caller, then move it over ``filename``.

    NOTE(review): the replace runs in ``finally``, i.e. even when the
    caller's block raises — confirm that overwriting the target with a
    partial temp file on error is intended.
    """
    temp = _tempfile(os.fsencode(filename))
    try:
        yield temp
    finally:
        temp.close()
        # os.replace is atomic when source and target share a filesystem,
        # which is guaranteed here because the temp file is created in the
        # target's own directory.
        os.replace(temp.name, filename)
def get_item(filename, uuid):
"""
Read entry from JSON file
"""
with open(os.fsencode(str(filename)), "r") as f:
data = json.load(f)
results = [i for i in data if i["uuid"] == str(uuid)]
if results:
return results
return None |
def set_item(filename, item):
    """
    Append ``item`` to the JSON list stored in ``filename``.

    :returns: True if the item was added, None if an entry with the same
        ``uuid`` already exists (file left untouched).
    """
    encoded = os.fsencode(str(filename))
    # load the JSON data into memory
    with open(encoded) as products_file:
        products_data = json.load(products_file)
    # BUG FIX: check for duplicates *before* opening the atomic writer.
    # The previous version opened it first and, on a duplicate uuid, bailed
    # out without writing anything -- the empty temp file then replaced and
    # destroyed the original data file.
    if any(entry["uuid"] == str(item["uuid"]) for entry in products_data):
        return None
    # add the new item and rewrite the file atomically
    products_data.append(item)
    with atomic_write(encoded) as temp_file:
        json.dump(products_data, temp_file)
    return True
def update_item(filename, item, uuid):
    """
    Merge ``item`` into the entry matching ``uuid`` and rewrite the file.
    """
    with atomic_write(os.fsencode(str(filename))) as temp_file:
        with open(os.fsencode(str(filename))) as products_file:
            # load the JSON data into memory
            products_data = json.load(products_file)
        # apply modifications to the JSON data wrt UUID
        # TODO: handle the two shapes in a neat way
        is_orders = 'products' in products_data[-1]
        for index, entry in enumerate(products_data):
            if entry["uuid"] != str(uuid):
                continue
            if is_orders:
                # orders object: update the first nested product
                products_data[index]["products"][0].update(item)
            else:
                # plain products object
                products_data[index].update(item)
        # save the modified JSON data into the temp file
        json.dump(products_data, temp_file)
    return True
def run(self, options):
    """
    Check that the command line tools this program depends on are
    installed, reporting each one found or missing.

    .. todo::
        check network connection

    :param Namespace options: parse result from argparse
    :return:
    """
    self.logger.debug("debug enabled...")
    depends = ['git']
    missing_tools = []
    self.logger.info("depends list: %s", depends)
    for tool in depends:
        location = shutil.which(tool)
        if location:
            ok_mark = termcolor.colored('[OK]', color='blue')
            self.print_message(
                "Found {}:{}... {}".format(tool, location, ok_mark))
        else:
            missing_tools.append(tool)
            self.error_message(
                'Missing tool:`{}`... {}'.format(tool, '[ERR]'),
                prefix='', suffix='')
    if missing_tools:
        self.print_message('')
        self.error("please install missing tools...")
    else:
        self.print_message("\nNo error found,"
                           "you can use cliez in right way.")
    self.logger.debug("check finished...")
def experiment_data(self, commit=None, must_contain_results=False):
    """
    Collect the metadata stored on every experiment tag.

    :param commit: the commit that all the experiments should have happened
        or None to include all
    :type commit: str
    :param must_contain_results: include only tags that contain results
    :type must_contain_results: bool
    :return: all the experiment data, keyed by tag name
    :rtype: dict
    """
    # Resolve the commit filter once instead of per tag.
    target_sha = None
    if commit is not None:
        target_sha = name_to_object(self.__repository, commit).hexsha
    collected = {}
    for tag in self.__repository.tags:
        if not tag.name.startswith(self.__tag_prefix):
            continue
        data = json.loads(tag.tag.message)
        if must_contain_results and "results" not in data:
            continue
        if target_sha is not None and tag.tag.object.hexsha != target_sha:
            continue
        collected[tag.name] = data
    return collected
def delete(self, experiment_name):
    """
    Delete an experiment by removing the associated tag.

    :param experiment_name: the name of the experiment to be deleted
    :type experiment_name: str
    :rtype bool
    :return if deleting succeeded
    """
    prefix = self.__tag_prefix
    if experiment_name.startswith(prefix):
        target_tag = experiment_name
    else:
        target_tag = prefix + experiment_name
    if target_tag not in [t.name for t in self.__repository.tags]:
        return False
    self.__repository.delete_tag(target_tag)
    # Confirm the tag is actually gone.
    return target_tag not in [t.name for t in self.__repository.tags]
def main():
    """Entry point: dispatch the selected mode to its handler."""
    plugin = Register()
    if plugin.args.option != 'command':
        plugin.unknown("Unknown actions.")
    else:
        plugin.command_handle()
def command_handle(self):
    """Run the configured shell command remotely and report the single
    number it prints against the warning/critical thresholds.
    """
    self.__results = self.execute(self.args.command)
    self.close()
    self.logger.debug("results: {}".format(self.__results))
    if not self.__results:
        self.unknown("{} return nothing.".format(self.args.command))
    if len(self.__results) != 1:
        self.unknown(
            "{} return more than one number.".format(
                self.args.command))
    # BUG FIX: the old ``isinstance(..., (int, long))`` check was both dead
    # (int() always returns an int) and a NameError on Python 3 (``long``).
    # Guard the conversion itself instead.
    try:
        self.__result = int(self.__results[0])
    except ValueError:
        self.unknown(
            "{} didn't return single number.".format(
                self.args.command))
    self.logger.debug("result: {}".format(self.__result))
    status = self.ok
    # Compare the value against the thresholds.
    if self.__result > self.args.warning:
        status = self.warning
    if self.__result > self.args.critical:
        status = self.critical
    # Output
    self.shortoutput = "{0} return {1}.".format(
        self.args.command, self.__result)
    [self.longoutput.append(line)
     for line in self.__results if self.__results]
    self.perfdata.append("{command}={result};{warn};{crit};0;".format(
        crit=self.args.critical,
        warn=self.args.warning,
        result=self.__result,
        command=self.args.command))
    # Return status with message to Nagios.
    status(self.output(long_output_limit=None))
    self.logger.debug("Return status and exit to Nagios.")
def execute(self, command, timeout=None):
    """Execute a shell command over the established SSH transport.

    :param command: shell command line to run remotely
    :param timeout: per-channel timeout in seconds; defaults to
        ``self.args.timeout``
    :returns: list of stripped stdout lines on success (exit status 0);
        otherwise ``self.unknown`` is invoked with the error
    """
    try:
        self.channel = self.ssh.get_transport().open_session()
    except paramiko.SSHException as e:
        self.unknown("Create channel error: %s" % e)
    try:
        self.channel.settimeout(self.args.timeout if not timeout else timeout)
    except socket.timeout as e:
        self.unknown("Settimeout for channel error: %s" % e)
    try:
        self.logger.debug("command: {}".format(command))
        self.channel.exec_command(command)
    except paramiko.SSHException as e:
        self.unknown("Execute command error: %s" % e)
    try:
        self.stdin = self.channel.makefile('wb', -1)
        # NOTE(review): ``string.strip`` is Python-2-only; this module
        # predates Python 3.
        self.stderr = map(string.strip, self.channel.makefile_stderr('rb', -1).readlines())
        self.stdout = map(string.strip, self.channel.makefile('rb', -1).readlines())
    except Exception as e:
        self.unknown("Get result error: %s" % e)
    try:
        self.status = self.channel.recv_exit_status()
    except paramiko.SSHException as e:
        self.unknown("Get return code error: %s" % e)
    else:
        if self.status != 0:
            # BUG FIX: previously referenced the undefined ``self.errors``;
            # the stderr lines are stored in ``self.stderr``.
            self.unknown("Return code: %d , stderr: %s" % (self.status, self.stderr))
        else:
            return self.stdout
    finally:
        self.logger.debug("Execute command finish.")
def close(self):
    """Close and exit the connection.

    Any paramiko error is reported through ``self.unknown`` instead of
    being raised to the caller.
    """
    try:
        self.ssh.close()
        self.logger.debug("close connect succeed.")
    except paramiko.SSHException as e:
        self.unknown("close connect error: %s" % e)
def slinky(filename, seconds_available, bucket_name, aws_key, aws_secret):
    """Simple program that creates an temp S3 link."""
    # BUG FIX: the old condition was ``not A and B`` which only fired when
    # the key id was missing but the secret WAS set; both variables must be
    # present.  (print() with one argument is valid on Python 2 and 3.)
    if not (os.environ.get('AWS_ACCESS_KEY_ID')
            and os.environ.get('AWS_SECRET_ACCESS_KEY')):
        print('Need to set environment variables for AWS access and create a slinky bucket.')
        exit()
    print(create_temp_s3_link(filename, seconds_available, bucket_name))
def check_readable(self, timeout):
    """
    Poll ``self.stdout`` and return True if it is readable.

    :param float timeout: seconds to wait I/O
    :return: True if readable, else False
    :rtype: boolean
    """
    readable, _, _ = select.select([self._stdout], [], [], timeout)
    return len(readable) > 0
def get_indices_list(s: Any) -> List[str]:
    """ Retrieve a list of characters and escape codes where each escape
        code uses only one index. The indexes will not match up with the
        indexes in the original string.
    """
    index_map = get_indices(s)
    return [index_map[key] for key in sorted(index_map, key=int)]
def strip_codes(s: Any) -> str:
    """ Strip all color codes from a string.
        Returns empty string for "falsey" inputs (except the number 0,
        which is formatted normally).
    """
    if s or s == 0:
        return codepat.sub('', str(s))
    return ''
def init_build(self, asset, builder):
    """
    Called when builder group collect files.
    Resolves the absolute bundle path from the relative one, if needed.

    :type asset: static_bundle.builders.Asset
    :type builder: static_bundle.builders.StandardBuilder
    """
    input_dir = builder.config.input_dir
    if not self.abs_path:
        relative = utils.prepare_path(self.rel_bundle_path)
        self.abs_bundle_path = utils.prepare_path([input_dir, relative])
        self.abs_path = True
    self.input_dir = input_dir
def add_file(self, *args):
    """
    Add single file or list of files to bundle

    :type: file_path: str|unicode
    """
    self.files.extend(FilePath(file_path, self) for file_path in args)
def add_directory(self, *args, **kwargs):
    """
    Add directory or directories list to bundle

    :param exclusions: List of excluded paths
    :type path: str|unicode
    :type exclusions: list
    """
    excluded = kwargs.get('exclusions', None)
    self.files.extend(
        DirectoryPath(path, self, exclusions=excluded) for path in args)
def add_path_object(self, *args):
    """
    Add custom path objects

    :type: path_object: static_bundle.paths.AbstractPath
    """
    for path_object in args:
        path_object.bundle = self
        self.files.append(path_object)
def add_prepare_handler(self, prepare_handlers):
    """
    Add prepare handler to bundle

    :type: prepare_handler: static_bundle.handlers.AbstractPrepareHandler
    """
    if not isinstance(prepare_handlers, static_bundle.BUNDLE_ITERABLE_TYPES):
        prepare_handlers = [prepare_handlers]
    if self.prepare_handlers_chain is None:
        self.prepare_handlers_chain = []
    self.prepare_handlers_chain.extend(prepare_handlers)
def prepare(self):
    """
    Called when builder run collect files in builder group

    :rtype: list[static_bundle.files.StaticFileResult]
    """
    files = self.collect_files()
    handlers = self.prepare_handlers_chain
    if handlers is None:
        # fall back to the default handler chain
        handlers = [LessCompilerPrepareHandler()]
    for handler in handlers:
        files = handler.prepare(files, self)
    return files
def main():
    """Entry point: dispatch the selected mode to its handler."""
    plugin = Register()
    if plugin.args.option != 'filenumber':
        plugin.unknown("Unknown actions.")
    else:
        plugin.filenumber_handle()
def filenumber_handle(self):
    """Count the files in ``self.args.path`` on the FTP server and compare
    the count against the warning/critical thresholds."""
    self.__results = []
    self.__dirs = []
    self.__files = []
    self.__ftp = self.connect()
    self.__ftp.dir(self.args.path, self.__results.append)
    self.logger.debug("dir results: {}".format(self.__results))
    self.quit()
    # Split the listing into directory names and file names.
    for line in self.__results:
        if "<DIR>" in line:
            self.__dirs.append(str(line.split()[3]))
        else:
            self.__files.append(str(line.split()[2]))
    self.__result = len(self.__files)
    self.logger.debug("result: {}".format(self.__result))
    # Compare the value against the thresholds.
    status = self.ok
    if self.__result > self.args.warning:
        status = self.warning
    if self.__result > self.args.critical:
        status = self.critical
    # Output
    self.shortoutput = "Found {0} files in {1}.".format(self.__result,
                                                        self.args.path)
    if self.__results:
        for line in self.__results:
            self.longoutput.append(line)
    self.perfdata.append("{path}={result};{warn};{crit};0;".format(
        crit=self.args.critical,
        warn=self.args.warning,
        result=self.__result,
        path=self.args.path))
    self.logger.debug("Return status and output.")
    status(self.output())
def register_json(self, data):
    """
    Register the contents as JSON.

    Parses ``data`` and stores one entry per sensor (keyed by the sensor
    id) in ``self.data``: the mandatory id/value fields, the optional
    unit/threshold fields, any extra configured keys, the optional sensor
    timestamp, and the time the data was received.
    """
    j = json.loads(data)
    self.last_data_timestamp = \
        datetime.datetime.utcnow().replace(microsecond=0).isoformat()
    try:
        for v in j:
            # prepare the sensor entry container
            self.data[v[self.id_key]] = {}
            # add the mandatory entries
            self.data[v[self.id_key]][self.id_key] = \
                v[self.id_key]
            self.data[v[self.id_key]][self.value_key] = \
                v[self.value_key]
            # add the optional well known entries if provided
            if self.unit_key in v:
                self.data[v[self.id_key]][self.unit_key] = \
                    v[self.unit_key]
            if self.threshold_key in v:
                self.data[v[self.id_key]][self.threshold_key] = \
                    v[self.threshold_key]
            # add any further entries found
            for k in self.other_keys:
                if k in v:
                    self.data[v[self.id_key]][k] = v[k]
            # add the custom sensor time
            # BUG FIX: this used to index ``self.data`` with the sensor
            # *time value* instead of the sensor id, raising KeyError.
            if self.sensor_time_key in v:
                self.data[v[self.id_key]][self.sensor_time_key] = \
                    v[self.sensor_time_key]
            # last: add the time the data was received (overwriting any
            # not properly defined timestamp that was already there)
            self.data[v[self.id_key]][self.time_key] = \
                self.last_data_timestamp
    except KeyError as e:
        print("The main key was not found on the serial input line: " +
              str(e))
    except ValueError as e:
        print("No valid JSON string received. Waiting for the next turn.")
        print("The error was: " + str(e))
def get_text(self):
    """
    Render the collected sensor data in human readable form.
    """
    text = "==== " + str(self.last_data_timestamp) + " ====\n"
    for sensor_id in self.data:
        entry = self.data[sensor_id]
        text += sensor_id + " " + str(entry[self.value_key])
        unit = ""
        if self.unit_key in entry:
            unit = entry[self.unit_key]
        text += unit
        if self.threshold_key in entry:
            threshold = entry[self.threshold_key]
            if threshold < entry[self.value_key]:
                text += " !Warning: Value is over threshold: " + \
                    str(threshold) + "!"
            else:
                text += " (" + str(threshold) + unit + ")"
        for extra_key in self.other_keys:
            if extra_key in entry:
                text += " " + entry[extra_key]
        text += "\n"
    return text
def get_translated_data(self):
    """
    Return a copy of the data with keys renamed via the translation table.
    """
    translated = {}
    for sensor_id, entry in self.data.items():
        translated[sensor_id] = {
            self.translation_keys[key]: value
            for key, value in entry.items()
        }
    return translated
def get_json(self, prettyprint=False, translate=True):
    """
    Get the data in JSON form (a list of per-sensor entries).
    """
    source = self.get_translated_data() if translate else self.data
    entries = [source[key] for key in source]
    if prettyprint:
        return json.dumps(entries, indent=2, separators=(',', ': '))
    return json.dumps(entries)
def get_json_tuples(self, prettyprint=False, translate=True):
    """
    Get the data as JSON tuples: the rendered list with its surrounding
    brackets stripped and a trailing comma, or "" when there is no data.
    """
    rendered = self.get_json(prettyprint, translate)
    if len(rendered) <= 2:
        return ""
    if prettyprint:
        return rendered[1:-2] + ",\n"
    return rendered[1:-1] + ","
def get(self, url, params=None):
    """
    Issues a GET request against the API, properly formatting the params

    :param url: a string, the url you are requesting
    :param params: a dict, the key-value of all the paramaters needed
                   in the request
    :returns: a dict parsed of the JSON response
    """
    # Copy instead of mutating: the old mutable default ``params={}``
    # leaked api_key into the caller's dict (and the shared default).
    params = dict(params or {})
    params.update({'api_key': self.api_key})
    try:
        response = requests.get(self.host + url, params=params)
        return self.json_parse(response.content)
    except RequestException as e:
        # Consistent with ``post``: hand the exception args to the parser
        # instead of crashing on ``e.args.content``.
        return self.json_parse(e.args)
def post(self, url, params=None, files=None):
    """
    Issues a POST request against the API, allows for multipart data uploads

    :param url: a string, the url you are requesting
    :param params: a dict, the key-value of all the parameters needed
                   in the request
    :param files: a list, the list of tuples of files
    :returns: a dict parsed of the JSON response
    """
    # Copy instead of mutating: the old mutable default ``params={}``
    # leaked api_key into the caller's dict (and the shared default).
    params = dict(params or {})
    params.update({'api_key': self.api_key})
    try:
        response = requests.post(self.host + url, data=params, files=files)
        return self.json_parse(response.content)
    except RequestException as e:
        return self.json_parse(e.args)
def json_parse(self, content):
    """
    Wraps and abstracts content validation and JSON parsing
    to make sure the user gets the correct response.

    :param content: The content returned from the web request to be parsed as json
    :returns: a dict of the json response
    """
    try:
        data = json.loads(content)
    except ValueError:
        # Python-3-compatible form of the old ``except ValueError, e``
        # (the bound exception was never used anyway).
        return {'meta': {'status': 500, 'msg': 'Server Error'},
                'response': {"error": "Malformed JSON or HTML was returned."}}
    # We only really care about the response if we succeed
    # and the error if we fail
    if 'error' in data:
        return {'meta': {'status': 400, 'msg': 'Bad Request'},
                'response': {"error": data['error']}}
    elif 'result' in data:
        return data['result']
    else:
        return {}
def load_values(self):
    """
    Go through the env var map, transferring the values to this object
    as attributes.

    :raises: RuntimeError if a required env var isn't defined.
    """
    for config_name, evar in self.evar_defs.items():
        if evar.is_required and evar.name not in os.environ:
            raise RuntimeError(
                "Missing required environment variable: {evar_name}\n"
                "{help_txt}".format(
                    evar_name=evar.name, help_txt=evar.help_txt))
        # Env var is present. Transfer its value over.
        if evar.name in os.environ:
            self[config_name] = os.environ.get(evar.name)
        else:
            self[config_name] = evar.default_val
        # Perform any validations or transformations.
        # (renamed from ``filter`` to avoid shadowing the builtin)
        for value_filter in evar.filters:
            self[config_name] = value_filter(self.get(config_name), evar)
    # This is the top-level filter that is often useful for checking
    # the values of related env vars (instead of individual validation).
    self._filter_all()
def embed_data(request):
    """
    Create a temporary directory with input data for the test.

    The directory contents is copied from a directory with the same name as
    the module located in the same directory of the test module.
    """
    fixture = _EmbedDataFixture(request)
    fixture.delete_data_dir()
    fixture.create_data_dir()
    yield fixture
    # Teardown: runs when pytest finalizes the fixture normally.
    fixture.delete_data_dir()
def get_filename(self, *parts):
    '''
    Returns an absolute filename in the data-directory (standardized by StandardizePath).

    @params parts: list(unicode)
        Path parts. Each part is joined to form a path.

    :rtype: unicode
    :returns:
        The full path prefixed with the data-directory.

    @remarks:
        This method triggers the data-directory creation.
    '''
    from zerotk.easyfs import StandardizePath
    joined = '/'.join([self._data_dir] + list(parts))
    return StandardizePath(joined)
def assert_equal_files(self, obtained_fn, expected_fn, fix_callback=lambda x:x, binary=False, encoding=None):
    '''
    Compare two files contents. If the files differ, show the diff and write a nice HTML
    diff file into the data directory.
    Searches for the filenames both inside and outside the data directory (in that order).

    :param unicode obtained_fn: basename to obtained file into the data directory, or full path.
    :param unicode expected_fn: basename to expected file into the data directory, or full path.
    :param bool binary:
        Thread both files as binary files.
    :param unicode encoding:
        File's encoding. If not None, contents obtained from file will be decoded using this
        `encoding`.
    :param callable fix_callback:
        A callback to "fix" the contents of the obtained (first) file.
        This callback receives a list of strings (lines) and must also return a list of lines,
        changed as needed.
        The resulting lines will be used to compare with the contents of expected_fn.
    :param bool binary:
        .. seealso:: zerotk.easyfs.GetFileContents
    '''
    import os
    from zerotk.easyfs import GetFileContents, GetFileLines
    # Hide this helper frame in pytest tracebacks so failures point at the test.
    __tracebackhide__ = True
    import io
    def FindFile(filename):
        # See if this path exists in the data dir
        data_filename = self.get_filename(filename)
        if os.path.isfile(data_filename):
            return data_filename
        # If not, we might have already received a full path
        if os.path.isfile(filename):
            return filename
        # If we didn't find anything, raise an error
        from ._exceptions import MultipleFilesNotFound
        raise MultipleFilesNotFound([filename, data_filename])
    obtained_fn = FindFile(obtained_fn)
    expected_fn = FindFile(expected_fn)
    if binary:
        # Binary mode: raw byte-for-byte comparison, no diff report.
        obtained_lines = GetFileContents(obtained_fn, binary=True)
        expected_lines = GetFileContents(expected_fn, binary=True)
        assert obtained_lines == expected_lines
    else:
        # Text mode: the obtained lines are normalized through fix_callback
        # before comparing.
        obtained_lines = fix_callback(GetFileLines(obtained_fn, encoding=encoding))
        expected_lines = GetFileLines(expected_fn, encoding=encoding)
        if obtained_lines != expected_lines:
            # Write a side-by-side HTML diff next to the obtained file...
            html_fn = os.path.splitext(obtained_fn)[0] + '.diff.html'
            html_diff = self._generate_html_diff(
                expected_fn, expected_lines, obtained_fn, obtained_lines)
            with io.open(html_fn, 'w') as f:
                f.write(html_diff)
            # ...and raise with a plain-text context diff for the console.
            import difflib
            diff = ['FILES DIFFER:', obtained_fn, expected_fn]
            diff += ['HTML DIFF: %s' % html_fn]
            diff += difflib.context_diff(obtained_lines, expected_lines)
            raise AssertionError('\n'.join(diff) + '\n')
def _generate_html_diff(self, expected_fn, expected_lines, obtained_fn, obtained_lines):
"""
Returns a nice side-by-side diff of the given files, as a string.
"""
import difflib
differ = difflib.HtmlDiff()
return differ.make_file(
fromlines=expected_lines,
fromdesc=expected_fn,
tolines=obtained_lines,
todesc=obtained_fn,
) |
def add_peer(self, peer):
    """
    Add a peer or multiple peers to the PEERS variable, takes a single
    string or a list.

    :param peer: peer URL or list of peer URLs
    :raises ValueError: if ``peer`` is neither a list nor a string (or if
        ``check_url`` rejects a URL).
    """
    if isinstance(peer, list):
        for url in peer:
            check_url(url)
        self.PEERS.extend(peer)
    elif isinstance(peer, str):
        check_url(peer)
        self.PEERS.append(peer)
    else:
        # Consistent with remove_peer: reject unsupported types loudly
        # instead of silently ignoring them.
        raise ValueError('peer parameter did not pass url validation')
def remove_peer(self, peer):
    """
    remove one or multiple peers from PEERS variable

    :param peer: peer URL (exact match) or list of URLs (substring match)
    :raises ValueError: if ``peer`` is neither a list nor a string
    """
    # BUG FIX: the old code called ``self.PEERS.remove`` while iterating
    # over ``self.PEERS``, which skips the element following each removal.
    # Rebuild in place (slice assignment keeps the same list object).
    if isinstance(peer, list):
        for target in peer:
            check_url(target)
            self.PEERS[:] = [p for p in self.PEERS if target not in p]
    elif isinstance(peer, str):
        check_url(peer)
        self.PEERS[:] = [p for p in self.PEERS if p != peer]
    else:
        raise ValueError('peer paramater did not pass url validation')
def status(self):
    """
    check the status of the network and the peers

    :return: network_height, peer_status
    """
    chosen = random.choice(self.PEERS)
    peerdata = requests.get(
        url='http://{}:4001'.format(chosen) + '/api/peers/').json()['peers']
    networkheight = max(node['height'] for node in peerdata)
    peers_status = {}
    for node in peerdata:
        if 'http://{}:4001'.format(node['ip']) in self.PEERS:
            peers_status[node['ip']] = {
                'height': node['height'],
                'status': node['status'],
                'version': node['version'],
                'delay': node['delay'],
            }
    return {
        'network_height': networkheight,
        'peer_status': peers_status
    }
def broadcast_tx(self, address, amount, secret, secondsecret=None, vendorfield=''):
    """broadcasts a transaction to the peerslist using ark-js library"""
    chosen_peer = random.choice(self.PEERS)
    park = Park(chosen_peer, 4001, constants.ARK_NETHASH, '1.1.1')
    return park.transactions().create(
        address, str(amount), vendorfield, secret, secondsecret)
def register(self, service, name=''):
    """
    Exposes a given service to this API.

    ORB models are exposed as `ModelService` endpoints keyed by their
    database name; everything else defers to the base implementation.
    """
    try:
        is_model = issubclass(service, orb.Model)
    except TypeError:
        # ``issubclass`` raises TypeError when ``service`` is not a class.
        # (The old code caught ``StandardError``, which is Python-2-only
        # and a NameError on Python 3.)
        is_model = False
    # expose an ORB table dynamically as a service
    if is_model:
        self.services[service.schema().dbname()] = (ModelService, service)
    else:
        super(OrbApiFactory, self).register(service, name=name)
def main():
    """Entry point: dispatch the selected mode to its handler."""
    plugin = Register()
    if plugin.args.option != 'sql':
        plugin.unknown("Unknown actions.")
    else:
        plugin.sql_handle()
def prepare(self, input_files, bundle):
    """
    Compile ``.less`` inputs, passing every other file through unchanged.

    :type input_files: list[static_bundle.files.StaticFileResult]
    :type bundle: static_bundle.bundles.AbstractBundle
    :rtype: list
    """
    prepared = []
    for source in input_files:
        is_less = source.extension == "less" and os.path.isfile(source.abs_path)
        if not is_less:
            prepared.append(source)
            continue
        compiled = self.get_compile_file(source, bundle)
        self.compile(source, compiled)
        prepared.append(compiled)
    return prepared
def main():
    """ Main entry point, expects doctopt arg dict as argd.

    Reads text from positional args (each tried as a file name first) or
    stdin, wraps it with FormatBlock, and prints the formatted lines.
    Returns 0 on success (used as the process exit code).
    """
    global DEBUG
    argd = docopt(USAGESTR, version=VERSIONSTR, script=SCRIPT)
    DEBUG = argd['--debug']
    # Width falls back to the default, and is clamped to at least 1.
    width = parse_int(argd['--width'] or DEFAULT_WIDTH) or 1
    indent = parse_int(argd['--indent'] or (argd['--INDENT'] or 0))
    prepend = ' ' * (indent * 4)
    if prepend and argd['--indent']:
        # Smart indent, change max width based on indention.
        width -= len(prepend)
    userprepend = argd['--prepend'] or (argd['--PREPEND'] or '')
    prepend = ''.join((prepend, userprepend))
    if argd['--prepend']:
        # Smart indent, change max width based on prepended text.
        width -= len(userprepend)
    userappend = argd['--append'] or (argd['--APPEND'] or '')
    if argd['--append']:
        width -= len(userappend)
    if argd['WORDS']:
        # Try each argument as a file name.
        # (args of 256+ chars are assumed to be literal text, not paths)
        argd['WORDS'] = (
            (try_read_file(w) if len(w) < 256 else w)
            for w in argd['WORDS']
        )
        words = ' '.join((w for w in argd['WORDS'] if w))
    else:
        # No text/filenames provided, use stdin for input.
        words = read_stdin()
    block = FormatBlock(words).iter_format_block(
        chars=argd['--chars'],
        fill=argd['--fill'],
        prepend=prepend,
        strip_first=argd['--stripfirst'],
        append=userappend,
        strip_last=argd['--striplast'],
        width=width,
        newlines=argd['--newlines'],
        lstrip=argd['--lstrip'],
    )
    for i, line in enumerate(block):
        if argd['--enumerate']:
            # Current line number format supports up to 999 lines before
            # messing up. Who would format 1000 lines like this anyway?
            print('{: >3}: {}'.format(i + 1, line))
        else:
            print(line)
    return 0
def debug(*args, **kwargs):
    """ Print a message only if DEBUG is truthy.

    Prefixes the message with the caller's file name, line number and
    function (found by walking the call stack).  Extra keyword args:
    ``parent`` (an object whose class name prefixes the function name)
    and ``back`` (how many stack frames to walk up; default 1).
    All other args/kwargs are forwarded to print_err.
    """
    if not (DEBUG and args):
        return None
    # Include parent class name when given.
    parent = kwargs.get('parent', None)
    with suppress(KeyError):
        kwargs.pop('parent')
    # Go back more than once when given.
    backlevel = kwargs.get('back', 1)
    with suppress(KeyError):
        kwargs.pop('back')
    frame = inspect.currentframe()
    # Go back a number of frames (usually 1).
    while backlevel > 0:
        frame = frame.f_back
        backlevel -= 1
    fname = os.path.split(frame.f_code.co_filename)[-1]
    lineno = frame.f_lineno
    if parent:
        func = '{}.{}'.format(parent.__class__.__name__, frame.f_code.co_name)
    else:
        func = frame.f_code.co_name
    # Build the colorized "file:lineno func():" prefix.
    lineinfo = '{}:{} {}: '.format(
        C(fname, 'yellow'),
        C(str(lineno).ljust(4), 'blue'),
        C().join(C(func, 'magenta'), '()').ljust(20)
    )
    # Patch args to stay compatible with print().
    pargs = list(C(a, 'green').str() for a in args)
    pargs[0] = ''.join((lineinfo, pargs[0]))
    print_err(*pargs, **kwargs)
def parse_int(s):
    """ Parse a string as an integer.
        Exit with a message on failure.
    """
    try:
        return int(s)
    except ValueError:
        print_err('\nInvalid integer: {}'.format(s))
        sys.exit(1)
def try_read_file(s):
    """ If `s` is a file name, read the file and return it's content.
        Otherwise, return the original string.
        Returns None if the file was opened, but errored during reading.
    """
    try:
        with open(s, 'r') as f:
            return f.read()
    except FileNotFoundError:
        # Not a file name; treat the argument as literal text.
        return s
    except EnvironmentError as ex:
        print_err('\nFailed to read file: {}\n  {}'.format(s, ex))
        return None
def close(self):
    """
    Disconnect and close *Vim*.
    """
    self._tempfile.close()
    process = self._process
    process.terminate()
    # Escalate to kill only if terminate did not stop the process.
    if process.is_alive():
        process.kill()
def send_keys(self, keys, wait=True):
    """
    Send a raw key sequence to *Vim*.

    .. note:: *Vim* style key sequence notation (like ``<Esc>``)
        is not recognized.
        Use escaped characters (like ``'\033'``) instead.

    Example:
        >>> import headlessvim
        >>> with headlessvim.open() as vim:
        ...     vim.send_keys('ispam\033')
        ...     str(vim.display_lines()[0].strip())
        ...
        'spam'

    :param string keys: key sequence to send
    :param boolean wait: whether to wait for a response
    """
    stdin = self._process.stdin
    stdin.write(bytearray(keys, self._encoding))
    stdin.flush()
    if wait:
        self.wait()
def wait(self, timeout=None):
    """
    Wait for response until timeout, flushing output while the process
    stays readable.
    If timeout is specified to None, ``self.timeout`` is used.

    :param float timeout: seconds to wait I/O
    """
    effective_timeout = self._timeout if timeout is None else timeout
    while self._process.check_readable(effective_timeout):
        self._flush()
def install_plugin(self, dir, entry_script=None):
    """
    Install a *Vim* plugin by extending the runtime path.

    :param string dir: the root directory contains *Vim* script
    :param string entry_script: path to the initializing script
    """
    self.runtimepath.append(dir)
    if entry_script is None:
        return
    self.command('runtime! {0}'.format(entry_script), False)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.