sentence1
stringlengths 52
3.87M
| sentence2
stringlengths 1
47.2k
| label
stringclasses 1
value |
---|---|---|
def write_results(self, data, name=None):
"""
Write JSON to file with the specified name.
:param name: Path to the file to be written to. If no path is passed
a new JSON file "results.json" will be created in the
current working directory.
:param output: JSON object.
"""
if name:
filepath = os.path.abspath(name)
else:
filepath = os.path.join(os.path.getcwd(), "results.json")
with open(filepath, "w", encoding="utf8") as f:
try:
f.write(unicode(json.dumps(data, indent=4)))
except NameError:
f.write(json.dumps(data, indent=4)) | Write JSON to file with the specified name.
:param name: Path to the file to be written to. If no path is passed
a new JSON file "results.json" will be created in the
current working directory.
:param output: JSON object. | entailment |
def create_engine(engine, options=None, defaults=None):
'''
Creates an instance of an engine.
There is a two-stage instantiation process with engines.
1. ``options``:
The keyword options to instantiate the engine class
2. ``defaults``:
The default configuration for the engine (options often depends on instantiated TTS engine)
'''
if engine not in _ENGINE_MAP.keys():
raise TTSError('Unknown engine %s' % engine)
options = options or {}
defaults = defaults or {}
einst = _ENGINE_MAP[engine](**options)
einst.configure_default(**defaults)
return einst | Creates an instance of an engine.
There is a two-stage instantiation process with engines.
1. ``options``:
The keyword options to instantiate the engine class
2. ``defaults``:
The default configuration for the engine (options often depends on instantiated TTS engine) | entailment |
def classify(self, txt):
'''
Classifies text by language. Uses preferred_languages weighting.
'''
ranks = []
for lang, score in langid.rank(txt):
if lang in self.preferred_languages:
score += self.preferred_factor
ranks.append((lang, score))
ranks.sort(key=lambda x: x[1], reverse=True)
return ranks[0][0] | Classifies text by language. Uses preferred_languages weighting. | entailment |
def get_engine_for_lang(self, lang):
'''
Determines the preferred engine/voice for a language.
'''
for eng in self.engines:
if lang in eng.languages.keys():
return eng
raise TTSError('Could not match language') | Determines the preferred engine/voice for a language. | entailment |
def say(self, txt, lang=None):
'''
Says the text.
if ``lang`` is ``None``, then uses ``classify()`` to detect language.
'''
lang = lang or self.classify(txt)
self.get_engine_for_lang(lang).say(txt, language=lang) | Says the text.
if ``lang`` is ``None``, then uses ``classify()`` to detect language. | entailment |
def configure_default(self, **_options):
'''
Sets default configuration.
Raises TTSError on error.
'''
language, voice, voiceinfo, options = self._configure(**_options)
self.languages_options[language] = (voice, options)
self.default_language = language
self.default_options = options | Sets default configuration.
Raises TTSError on error. | entailment |
def configure(self, **_options):
'''
Sets language-specific configuration.
Raises TTSError on error.
'''
language, voice, voiceinfo, options = self._configure(**_options)
self.languages_options[language] = (voice, options) | Sets language-specific configuration.
Raises TTSError on error. | entailment |
def say(self, phrase, **_options):
'''
Says the phrase, optionally allows to select/override any voice options.
'''
language, voice, voiceinfo, options = self._configure(**_options)
self._logger.debug("Saying '%s' with '%s'", phrase, self.SLUG)
self._say(phrase, language, voice, voiceinfo, options) | Says the phrase, optionally allows to select/override any voice options. | entailment |
def play(self, filename, translate=False): # pragma: no cover
'''
Plays the sounds.
:filename: The input file name
:translate: If True, it runs it through audioread which will translate from common compression formats to raw WAV.
'''
# FIXME: Use platform-independent and async audio-output here
# PyAudio looks most promising, too bad about:
# --allow-external PyAudio --allow-unverified PyAudio
if translate:
with tempfile.NamedTemporaryFile(suffix='.wav', delete=False) as f:
fname = f.name
with audioread.audio_open(filename) as f:
with contextlib.closing(wave.open(fname, 'w')) as of:
of.setnchannels(f.channels)
of.setframerate(f.samplerate)
of.setsampwidth(2)
for buf in f:
of.writeframes(buf)
filename = fname
if winsound:
winsound.PlaySound(str(filename), winsound.SND_FILENAME)
else:
cmd = ['aplay', str(filename)]
self._logger.debug('Executing %s', ' '.join([pipes.quote(arg) for arg in cmd]))
subprocess.call(cmd)
if translate:
os.remove(fname) | Plays the sounds.
:filename: The input file name
:translate: If True, it runs it through audioread which will translate from common compression formats to raw WAV. | entailment |
def getCollectDServer(queue, cfg):
"""Get the appropriate collectd server (multi processed or not)"""
server = CollectDServerMP if cfg.collectd_workers > 1 else CollectDServer
return server(queue, cfg) | Get the appropriate collectd server (multi processed or not) | entailment |
def _hashes_match(self, a, b):
"""Constant time comparison of bytes for py3, strings for py2"""
if len(a) != len(b):
return False
diff = 0
if six.PY2:
a = bytearray(a)
b = bytearray(b)
for x, y in zip(a, b):
diff |= x ^ y
return not diff | Constant time comparison of bytes for py3, strings for py2 | entailment |
def load_sites(*basin_ids):
"""
Load metadata for all sites in given basin codes.
"""
# Resolve basin ids to HUC8s if needed
basins = []
for basin in basin_ids:
if basin.isdigit() and len(basin) == 8:
basins.append(basin)
else:
from climata.huc8 import get_huc8
basins.extend(get_huc8(basin))
# Load sites with data since 1900
sites = StationMetaIO(
basin=basins,
parameter=list(elems.keys()),
start_date='1900-01-01',
end_date=date.today(),
meta=ALL_META_FIELDS,
)
# Load all sites (to get sites without data)
seen_sites = [site.uid for site in sites]
nodata_sites = [
site for site in StationMetaIO(basin=basins)
if site.uid not in seen_sites
]
# Determine the following from the site lists:
seen_auths = set() # Which authority codes are actually used by any site
seen_elems = set() # Which elems actually have data in any site
ranges = {} # The overall period of record for each site
for site in sites:
for auth in site.sids.keys():
seen_auths.add(auth)
start, end = None, None
for elem in site.valid_daterange:
s, e = site.valid_daterange[elem]
seen_elems.add(elem)
if s is None or e is None:
continue
if start is None or s < start:
start = s
if end is None or e > end:
end = e
ranges[site.uid] = [start, end]
# Check for authority codes that might not be in sites with data
for site in nodata_sites:
for auth in site.sids.keys():
seen_auths.add(auth)
# Print CSV headers (FIXME: use CsvFileIO for this?)
seen_auths = sorted(seen_auths)
seen_elems = sorted(seen_elems)
print(",".join(
['ACIS uid', 'name']
+ seen_auths
+ ['latitude', 'longitude', 'start', 'end', 'years']
+ [elems[elem]['desc'] for elem in seen_elems]
))
# Print sites with data
for site in sites:
# Determine if elems are available for entire period or shorter range
start, end = ranges[site.uid]
if start and end:
years = end.year - start.year + 1
elem_ranges = []
for elem in seen_elems:
estart, eend = site.valid_daterange[elem]
if estart is None:
erange = ""
elif estart == start and eend == end:
erange = "period"
else:
erange = "%s to %s" % (estart.date(), eend.date())
elem_ranges.append(erange)
# Output CSV row
print(",".join(map(
str,
[site.uid, site.name]
+ [site.sids.get(auth, "") for auth in seen_auths]
+ [site.latitude, site.longitude]
+ [start.date(), end.date(), years]
+ elem_ranges
)))
# Print CSV rows for sites without data
for site in nodata_sites:
print(",".join(map(
str,
[site.uid, site.name]
+ [site.sids.get(auth, "") for auth in seen_auths]
+ [site.latitude, site.longitude]
+ ["NO DATA"]
))) | Load metadata for all sites in given basin codes. | entailment |
def get_huc8(prefix):
"""
Return all HUC8s matching the given prefix (e.g. 1801) or basin name
(e.g. Klamath)
"""
if not prefix.isdigit():
# Look up hucs by name
name = prefix
prefix = None
for row in hucs:
if row.basin.lower() == name.lower():
# Use most general huc if two have the same name
if prefix is None or len(row.huc) < len(prefix):
prefix = row.huc
if prefix is None:
return []
huc8s = []
for row in hucs:
# Return all 8-digit hucs with given prefix
if len(row.huc) == 8 and row.huc.startswith(prefix):
huc8s.append(row.huc)
return huc8s | Return all HUC8s matching the given prefix (e.g. 1801) or basin name
(e.g. Klamath) | entailment |
def parse(self):
"""
Convert ACIS 'll' value into separate latitude and longitude.
"""
super(AcisIO, self).parse()
# This is more of a "mapping" step than a "parsing" step, but mappers
# only allow one-to-one mapping from input fields to output fields.
for row in self.data:
if 'meta' in row:
row = row['meta']
if 'll' in row:
row['longitude'], row['latitude'] = row['ll']
del row['ll'] | Convert ACIS 'll' value into separate latitude and longitude. | entailment |
def map_value(self, field, value):
"""
Clean up some values returned from the web service.
(overrides wq.io.mappers.BaseMapper)
"""
if field == 'sids':
# Site identifiers are returned as "[id] [auth_id]";
# Map to auth name for easier usability
ids = {}
for idinfo in value:
id, auth = idinfo.split(' ')
auth = AUTHORITY_BY_ID[auth]
ids[auth['name']] = id
return ids
if field == 'valid_daterange':
# Date ranges for each element are returned in an array
# (sorted by the order the elements were were requested);
# Convert to dictionary with element id as key
elems, complex = self.getlist('parameter')
ranges = {}
for elem, val in zip(elems, value):
if val:
start, end = val
ranges[elem] = (parse_date(start), parse_date(end))
else:
ranges[elem] = None, None
return ranges
return value | Clean up some values returned from the web service.
(overrides wq.io.mappers.BaseMapper) | entailment |
def get_field_names(self):
"""
ACIS web service returns "meta" and "data" for each station;
Use meta attributes as field names
"""
field_names = super(StationDataIO, self).get_field_names()
if set(field_names) == set(['meta', 'data']):
meta_fields = list(self.data[0]['meta'].keys())
if set(meta_fields) < set(self.getvalue('meta')):
meta_fields = self.getvalue('meta')
field_names = list(meta_fields) + ['data']
return field_names | ACIS web service returns "meta" and "data" for each station;
Use meta attributes as field names | entailment |
def usable_item(self, data):
"""
ACIS web service returns "meta" and "data" for each station; use meta
attributes as item values, and add an IO for iterating over "data"
"""
# Use metadata as item
item = data['meta']
# Add nested IO for data
elems, elems_is_complex = self.getlist('parameter')
if elems_is_complex:
elems = [elem['name'] for elem in elems]
add, add_is_complex = self.getlist('add')
item['data'] = DataIO(
data=data['data'],
parameter=elems,
add=add,
start_date=self.getvalue('start_date'),
end_date=self.getvalue('end_date'),
)
# TupleMapper will convert item to namedtuple
return super(StationDataIO, self).usable_item(item) | ACIS web service returns "meta" and "data" for each station; use meta
attributes as item values, and add an IO for iterating over "data" | entailment |
def load_data(self, data):
"""
MultiStnData data results are arrays without explicit dates;
Infer time series based on start date.
"""
dates = fill_date_range(self.start_date, self.end_date)
for row, date in zip(data, dates):
data = {'date': date}
if self.add:
# If self.add is set, results will contain additional
# attributes (e.g. flags). In that case, create one row per
# result, with attributes "date", "elem", "value", and one for
# each item in self.add.
for elem, vals in zip(self.parameter, row):
data['elem'] = elem
for add, val in zip(['value'] + self.add, vals):
data[add] = val
yield data
else:
# Otherwise, return one row per date, with "date" and each
# element's value as attributes.
for elem, val in zip(self.parameter, row):
# namedtuple doesn't like numeric field names
if elem.isdigit():
elem = "e%s" % elem
data[elem] = val
yield data | MultiStnData data results are arrays without explicit dates;
Infer time series based on start date. | entailment |
def get_field_names(self):
"""
Different field names depending on self.add setting (see load_data)
For BaseIO
"""
if self.add:
return ['date', 'elem', 'value'] + [flag for flag in self.add]
else:
field_names = ['date']
for elem in self.parameter:
# namedtuple doesn't like numeric field names
if elem.isdigit():
elem = "e%s" % elem
field_names.append(elem)
return field_names | Different field names depending on self.add setting (see load_data)
For BaseIO | entailment |
def fill_date_range(start_date, end_date, date_format=None):
"""
Function accepts start date, end date, and format (if dates are strings)
and returns a list of Python dates.
"""
if date_format:
start_date = datetime.strptime(start_date, date_format).date()
end_date = datetime.strptime(end_date, date_format).date()
date_list = []
while start_date <= end_date:
date_list.append(start_date)
start_date = start_date + timedelta(days=1)
return date_list | Function accepts start date, end date, and format (if dates are strings)
and returns a list of Python dates. | entailment |
def parse(self, value):
"""
Enforce rules and return parsed value
"""
if self.required and value is None:
raise ValueError("%s is required!" % self.name)
elif self.ignored and value is not None:
warn("%s is ignored for this class!" % self.name)
elif not self.multi and isinstance(value, (list, tuple)):
if len(value) > 1:
raise ValueError(
"%s does not accept multiple values!" % self.name
)
return value[0]
elif self.multi and value is not None:
if not isinstance(value, (list, tuple)):
return [value]
return value | Enforce rules and return parsed value | entailment |
def parse(self, value):
"""
Parse date
"""
value = super(DateOpt, self).parse(value)
if value is None:
return None
if isinstance(value, str):
value = self.parse_date(value)
if isinstance(value, datetime) and self.date_only:
value = value.date()
return value | Parse date | entailment |
def get_filter_options(cls):
"""
List all filter options defined on class (and superclasses)
"""
attr = '_filter_options_%s' % id(cls)
options = getattr(cls, attr, {})
if options:
return options
for key in dir(cls):
val = getattr(cls, key)
if isinstance(val, FilterOpt):
options[key] = val
setattr(cls, attr, options)
return options | List all filter options defined on class (and superclasses) | entailment |
def getlist(self, name):
"""
Retrieve given property from class/instance, ensuring it is a list.
Also determine whether the list contains simple text/numeric values or
nested dictionaries (a "complex" list)
"""
value = self.getvalue(name)
complex = {}
def str_value(val):
# TODO: nonlocal complex
if isinstance(val, dict):
complex['complex'] = True
return val
else:
return str(val)
if value is None:
pass
else:
value = [str_value(val) for val in as_list(value)]
return value, bool(complex) | Retrieve given property from class/instance, ensuring it is a list.
Also determine whether the list contains simple text/numeric values or
nested dictionaries (a "complex" list) | entailment |
def set_param(self, into, name):
"""
Set parameter key, noting whether list value is "complex"
"""
value, complex = self.getlist(name)
if value is not None:
into[name] = value
return complex | Set parameter key, noting whether list value is "complex" | entailment |
def get_params(self):
"""
Get parameters for web service, noting whether any are "complex"
"""
params = {}
complex = False
for name, opt in self.filter_options.items():
if opt.ignored:
continue
if self.set_param(params, name):
complex = True
return params, complex | Get parameters for web service, noting whether any are "complex" | entailment |
def params(self):
"""
URL parameters for wq.io.loaders.NetLoader
"""
params, complex = self.get_params()
url_params = self.default_params.copy()
url_params.update(self.serialize_params(params, complex))
return url_params | URL parameters for wq.io.loaders.NetLoader | entailment |
def serialize_params(self, params, complex=False):
"""
Serialize parameter names and values to a dict ready for urlencode()
"""
if complex:
# See climata.acis for an example implementation
raise NotImplementedError("Cannot serialize %s!" % params)
else:
# Simpler queries can use traditional URL parameters
return {
self.get_url_param(key): ','.join(val)
for key, val in params.items()
} | Serialize parameter names and values to a dict ready for urlencode() | entailment |
def get_inches(self):
''' convert the measurement to inches '''
if self._obs_value in self.MISSING:
return 'MISSING'
if self._obs_units == self.MILLIMETERS:
return round(self.INCH_CONVERSION_FACTOR * self._obs_value, 4) | convert the measurement to inches | entailment |
def formatted(self):
''' print a nicely formatted output of this report '''
return """
Weather Station: %s (%s, %s)
Elevation: %s m
Time: %s UTC
Air Temperature: %s C (%s F)
Wind Speed: %s m/s (%s mph)
Wind Direction: %s
Present Weather Obs: %s
Precipitation: %s
Cloud Coverage: %s oktas
Cloud Summation: %s
Solar Irradiance: %s
""" % (self.weather_station, self.latitude, self.longitude,
self.elevation, self.datetime, self.air_temperature,
self.air_temperature.get_fahrenheit(), self.wind_speed,
self.wind_speed.get_miles(), self.wind_direction,
str(self.present_weather), str(self.precipitation),
str(self.sky_cover), str(self.sky_cover_summation),
str(self.solar_irradiance)) | print a nicely formatted output of this report | entailment |
def loads(self, noaa_string):
''' load in a report (or set) from a string '''
self.raw = noaa_string
self.weather_station = noaa_string[4:10]
self.wban = noaa_string[10:15]
expected_length = int(noaa_string[0:4]) + self.PREAMBLE_LENGTH
actual_length = len(noaa_string)
if actual_length != expected_length:
msg = "Non matching lengths. Expected %d, got %d" % (expected_length,
actual_length)
raise ish_reportException(msg)
try:
self.datetime = datetime.strptime(noaa_string[15:27], '%Y%m%d%H%M')
except ValueError:
''' some cases, we get 2400 hours, which is really the next day, so
this is a workaround for those cases '''
time = noaa_string[15:27]
time = time.replace("2400", "2300")
self.datetime = datetime.strptime(time, '%Y%m%d%H%M')
self.datetime += timedelta(hours=1)
self.datetime = self.datetime.replace(tzinfo=pytz.UTC)
self.report_type = ReportType(noaa_string[41:46].strip())
self.latitude = float(noaa_string[28:34]) / self.GEO_SCALE
self.longitude = float(noaa_string[34:41]) / self.GEO_SCALE
self.elevation = int(noaa_string[46:51])
''' other mandatory fields '''
self.wind_direction = Direction(noaa_string[60:63],
Direction.RADIANS,
noaa_string[63:64])
self.wind_observation_direction_type = noaa_string[64:64]
self.wind_speed = Speed(int(noaa_string[65:69]) / float(self.SPEED_SCALE),
Speed.METERSPERSECOND,
noaa_string[69:70])
self.sky_ceiling = Distance(int(noaa_string[70:75]),
Distance.METERS,
noaa_string[75:76])
self.sky_ceiling_determination = noaa_string[76:77]
self.visibility_distance = Distance(int(noaa_string[78:84]),
Distance.METERS,
noaa_string[84:85])
self.visibility_variability = noaa_string[85:86]
self.visibility_variability_quality = noaa_string[86:87]
self.air_temperature = Temperature(int(noaa_string[87:92]) / self.TEMPERATURE_SCALE,
Units.CELSIUS,
noaa_string[92:93])
self.dew_point = Temperature(int(noaa_string[93:98]) / self.TEMPERATURE_SCALE,
Units.CELSIUS,
noaa_string[98:99])
self.humidity = Humidity(str(self.air_temperature), str(self.dew_point))
self.sea_level_pressure = Pressure(int(noaa_string[99:104])/self.PRESSURE_SCALE,
Pressure.HECTOPASCALS,
noaa_string[104:104])
''' handle the additional fields '''
additional = noaa_string[105:108]
if additional == 'ADD':
position = 108
while position < expected_length:
try:
(position, (addl_code, addl_string)) = self._get_component(noaa_string,
position)
self._additional[addl_code] = addl_string
except ish_reportException as err:
''' this catches when we move to remarks section '''
break
''' handle the remarks section if it exists '''
try:
position = noaa_string.index('REM', 108)
self._get_remarks_component(noaa_string, position)
except (ish_reportException, ValueError) as err:
''' this catches when we move to EQD section '''
return self | load in a report (or set) from a string | entailment |
def _get_remarks_component(self, string, initial_pos):
''' Parse the remarks into the _remarks dict '''
remarks_code = string[initial_pos:initial_pos + self.ADDR_CODE_LENGTH]
if remarks_code != 'REM':
raise ish_reportException("Parsing remarks. Expected REM but got %s." % (remarks_code,))
expected_length = int(string[0:4]) + self.PREAMBLE_LENGTH
position = initial_pos + self.ADDR_CODE_LENGTH
while position < expected_length:
key = string[position:position + self.ADDR_CODE_LENGTH]
if key == 'EQD':
break
chars_to_read = string[position + self.ADDR_CODE_LENGTH:position + \
(self.ADDR_CODE_LENGTH * 2)]
chars_to_read = int(chars_to_read)
position += (self.ADDR_CODE_LENGTH * 2)
string_value = string[position:position + chars_to_read]
self._remarks[key] = string_value
position += chars_to_read | Parse the remarks into the _remarks dict | entailment |
def _get_component(self, string, initial_pos):
''' given a string and a position, return both an updated position and
either a Component Object or a String back to the caller '''
add_code = string[initial_pos:initial_pos + self.ADDR_CODE_LENGTH]
if add_code == 'REM':
raise ish_reportException("This is a remarks record")
if add_code == 'EQD':
raise ish_reportException("This is EQD record")
initial_pos += self.ADDR_CODE_LENGTH
try:
useable_map = self.MAP[add_code]
except:
raise BaseException("Cannot find code %s in string %s (%d)." % (add_code, string, initial_pos))
# if there is no defined length, then read next three chars to get it
# this only applies to REM types, which have 3 chars for the type, then variable
if useable_map[1] is False:
chars_to_read = string[initial_pos + self.ADDR_CODE_LENGTH:initial_pos + \
(self.ADDR_CODE_LENGTH * 2)]
chars_to_read = int(chars_to_read)
initial_pos += (self.ADDR_CODE_LENGTH * 2)
else:
chars_to_read = useable_map[1]
new_position = initial_pos + chars_to_read
string_value = string[initial_pos:new_position]
try:
object_value = useable_map[2]()
object_value.loads(string_value)
except IndexError as err:
object_value = string_value
return (new_position, [add_code, object_value]) | given a string and a position, return both an updated position and
either a Component Object or a String back to the caller | entailment |
def loads(self, string):
''' load from a string '''
for line in string.split("\n"):
if len(line) < 10:
continue
try:
report = ish_report()
report.loads(line)
self._reports.append(report)
except BaseException as exp:
''' don't complain TOO much '''
logging.warning('unable to load report, error: %s' % exp) | load from a string | entailment |
def get_observations(self):
''' return only specific weather observations (FM types) and
ignore the summary of day reports '''
return [rpt for rpt in self._reports if rpt.report_type in self.OBS_TYPES] | return only specific weather observations (FM types) and
ignore the summary of day reports | entailment |
def get_miles(self):
''' convert the measurement to inches '''
if self._obs_value in self.MISSING:
return 'MISSING'
if self._obs_units == self.METERSPERSECOND:
return round(2.23694 * self._obs_value, 4) | convert the measurement to inches | entailment |
def do_pot(self):
"""
Sync the template with the python code.
"""
files_to_translate = []
log.debug("Collecting python sources for pot ...")
for source_path in self._source_paths:
for source_path in self._iter_suffix(path=source_path, suffix=".py"):
log.debug("... add to pot: {source}".format(source=str(source_path)))
files_to_translate.append(str(source_path))
for system_file in self.SYSTEM_SOURCE_FILES:
files_to_translate.append(str(self._system_path / system_file))
# FIXME: use separate domain for system source translations? Nerge them when generating mo's?
log.debug("Finished collection sources.")
pot_path = (self._po_path / self._basename).with_suffix(".pot")
command = ["xgettext", "--keyword=_", "--keyword=_translate",
"--output={output}".format(output=str(pot_path))]
command.extend(files_to_translate)
check_call(command)
log.debug("pot file \"{pot}\" created!".format(pot=str(pot_path)))
pot_copy_path = self._mo_path / pot_path.name
log.debug("Copying pot file to mo path: {pot_copy_path}".format(pot_copy_path=str(pot_copy_path)))
shutil.copy(str(pot_path), str(pot_copy_path)) | Sync the template with the python code. | entailment |
def do_po(self):
"""
Update all po files with the data in the pot reference file.
"""
log.debug("Start updating po files ...")
pot_path = (self._po_path / self._basename).with_suffix(".pot")
for po_dir_path in self._iter_po_dir():
po_path = (po_dir_path / self._basename).with_suffix(".po")
if po_path.exists():
log.debug("update {po}:".format(po=str(po_path)))
check_call(["msgmerge", "-U", str(po_path), str(pot_path)])
else:
log.debug("create {po}:".format(po=str(po_path)))
check_call(["msginit", "-i", str(pot_path), "-o", str(po_path), "--no-translator"])
po_copy_path = self._mo_path / po_path.parent.name / po_path.name
po_copy_path.parent.mkdir(exist_ok=True)
log.debug("Copying po file to mo path: {po_copy_path}".format(po_copy_path=str(po_copy_path)))
shutil.copy(str(po_path), str(po_copy_path))
log.debug("All po files updated") | Update all po files with the data in the pot reference file. | entailment |
def do_mo(self):
"""
Generate mo files for all po files.
"""
log.debug("Start updating mo files ...")
for po_dir_path in self._iter_po_dir():
po_path = (po_dir_path / self._basename).with_suffix(".po")
lc_path = self._mo_path / po_dir_path.name / "LC_MESSAGES"
lc_path.mkdir(parents=True, exist_ok=True)
mo_path = (lc_path / self._basename).with_suffix(".mo")
log.debug("Creating from {po}: {mo}".format(po=str(po_path), mo=str(mo_path)))
check_call(["msgfmt", str(po_path), "-o", str(mo_path)])
log.debug("All mo files updated") | Generate mo files for all po files. | entailment |
def run(self) -> None:
"""Runs the worker and consumes messages from RabbitMQ.
Returns only after `shutdown()` is called.
"""
if self._logging_level:
logging.basicConfig(
level=getattr(logging, self._logging_level.upper()),
format="%(levelname).1s %(name)s.%(funcName)s:%(lineno)d - %(message)s")
signal.signal(signal.SIGINT, self._handle_sigint)
signal.signal(signal.SIGTERM, self._handle_sigterm)
if platform.system() != 'Windows':
# These features will not be available on Windows, but that is OK.
# Read this issue for more details:
# https://github.com/cenkalti/kuyruk/issues/54
signal.signal(signal.SIGHUP, self._handle_sighup)
signal.signal(signal.SIGUSR1, self._handle_sigusr1)
signal.signal(signal.SIGUSR2, self._handle_sigusr2)
self._started_at = os.times().elapsed
for t in self._threads:
t.start()
try:
signals.worker_start.send(self.kuyruk, worker=self)
self._consume_messages()
signals.worker_shutdown.send(self.kuyruk, worker=self)
finally:
self.shutdown_pending.set()
for t in self._threads:
t.join()
logger.debug("End run worker") | Runs the worker and consumes messages from RabbitMQ.
Returns only after `shutdown()` is called. | entailment |
def _process_message(self, message: amqp.Message) -> None:
"""Processes the message received from the queue."""
if self.shutdown_pending.is_set():
return
try:
if isinstance(message.body, bytes):
message.body = message.body.decode()
description = json.loads(message.body)
except Exception:
logger.error("Cannot decode message. Dropping. Message: %r", message.body)
traceback.print_exc()
message.channel.basic_reject(message.delivery_tag, requeue=False)
else:
logger.info("Processing task: %r", description)
self._process_description(message, description) | Processes the message received from the queue. | entailment |
def _apply_task(task: Task, args: Tuple, kwargs: Dict[str, Any]) -> Any:
"""Logs the time spent while running the task."""
if args is None:
args = ()
if kwargs is None:
kwargs = {}
start = monotonic()
try:
return task.apply(*args, **kwargs)
finally:
delta = monotonic() - start
logger.info("%s finished in %i seconds." % (task.name, delta)) | Logs the time spent while running the task. | entailment |
def _shutdown_timer(self) -> None:
"""Counts down from MAX_WORKER_RUN_TIME. When it reaches zero sutdown
gracefully.
"""
remaining = self._max_run_time - self.uptime
if not self.shutdown_pending.wait(remaining):
logger.warning('Run time reached zero')
self.shutdown() | Counts down from MAX_WORKER_RUN_TIME. When it reaches zero sutdown
gracefully. | entailment |
def _handle_sigint(self, signum: int, frame: Any) -> None:
"""Shutdown after processing current task."""
logger.warning("Catched SIGINT")
self.shutdown() | Shutdown after processing current task. | entailment |
def _handle_sighup(self, signum: int, frame: Any) -> None:
"""Used internally to fail the task when connection to RabbitMQ is
lost during the execution of the task.
"""
logger.warning("Catched SIGHUP")
exc_info = self._heartbeat_exc_info
self._heartbeat_exc_info = None
# Format exception info to see in tools like Sentry.
formatted_exception = ''.join(traceback.format_exception(*exc_info)) # noqa
raise HeartbeatError(exc_info) | Used internally to fail the task when connection to RabbitMQ is
lost during the execution of the task. | entailment |
def _handle_sigusr1(signum: int, frame: Any) -> None:
"""Print stacktrace."""
print('=' * 70)
print(''.join(traceback.format_stack()))
print('-' * 70) | Print stacktrace. | entailment |
def _handle_sigusr2(self, signum: int, frame: Any) -> None:
"""Drop current task."""
logger.warning("Catched SIGUSR2")
if self.current_task:
logger.warning("Dropping current task...")
raise Discard | Drop current task. | entailment |
def configuration_get_default_folder():
"""
Return the default folder where user-specific data is stored.
This depends of the system on which Python is running,
:return: path to the user-specific configuration data folder
"""
system = platform.system()
if system == 'Linux':
# https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html
sys_config_path = Path(os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config")))
elif system == 'Windows':
sys_config_path = Path(os.getenv('APPDATA', ''))
else:
log.error('Unknown system: "{system}" (using default configuration path)'.format(system=system))
sys_config_path = Path()
log.debug('User-specific system configuration folder="{sys_config_path}"'.format(
sys_config_path=sys_config_path))
sys_config = sys_config_path / PROJECT_TITLE
log.debug('User-specific {project} configuration folder="{sys_config}"'.format(
project=PROJECT_TITLE, sys_config=sys_config))
return sys_config | Return the default folder where user-specific data is stored.
This depends of the system on which Python is running,
:return: path to the user-specific configuration data folder | entailment |
def from_locale(cls, locale):
"""
Create a new Language instance from a locale string
:param locale: locale as string
:return: Language instance with instance.locale() == locale if locale is valid else instance of Unknown Language
"""
locale = str(locale)
if locale is 'unknown':
return UnknownLanguage(locale)
try:
return cls._from_xyz('locale', locale)
except NotALanguageException:
log.warning('Unknown locale: {}'.format(locale))
return UnknownLanguage(locale) | Create a new Language instance from a locale string
:param locale: locale as string
:return: Language instance with instance.locale() == locale if locale is valid else instance of Unknown Language | entailment |
def from_xx(cls, xx):
"""
Create a new Language instance from a ISO639 string
:param xx: ISO639 as string
:return: Language instance with instance.xx() == xx if xx is valid else instance of UnknownLanguage
"""
xx = str(xx).lower()
if xx is 'unknown':
return UnknownLanguage(xx)
try:
return cls._from_xyz('ISO639', xx)
except NotALanguageException:
log.warning('Unknown ISO639: {}'.format(xx))
return UnknownLanguage(xx) | Create a new Language instance from a ISO639 string
:param xx: ISO639 as string
:return: Language instance with instance.xx() == xx if xx is valid else instance of UnknownLanguage | entailment |
def from_xxx(cls, xxx):
"""
Create a new Language instance from a LanguageID string
:param xxx: LanguageID as string
:return: Language instance with instance.xxx() == xxx if xxx is valid else instance of UnknownLanguage
"""
xxx = str(xxx).lower()
if xxx is 'unknown':
return UnknownLanguage(xxx)
try:
return cls._from_xyz('LanguageID', xxx)
except NotALanguageException:
log.warning('Unknown LanguageId: {}'.format(xxx))
return UnknownLanguage(xxx) | Create a new Language instance from a LanguageID string
:param xxx: LanguageID as string
:return: Language instance with instance.xxx() == xxx if xxx is valid else instance of UnknownLanguage | entailment |
def from_name(cls, name):
"""
Create a new Language instance from a name as string
:param name: name as string
:return: Language instance with instance.name() == name if name is valid else instance of UnknownLanguage
"""
name = str(name).lower()
if name is 'unknown' or name is _('unknown'):
return UnknownLanguage(name)
try:
return cls._from_xyz('LanguageName', name)
except NotALanguageException:
log.warning('Unknown LanguageName: {}'.format(name))
return UnknownLanguage(name) | Create a new Language instance from a name as string
:param name: name as string
:return: Language instance with instance.name() == name if name is valid else instance of UnknownLanguage | entailment |
def _from_xyz(cls, xyzkey, xyzvalue):
"""
Private helper function to create new Language instance.
:param xyzkey: one of ('locale', 'ISO639', 'LanguageID', 'LanguageName')
:param xyzvalue: corresponding value of xyzkey
:return: Language instance
"""
if xyzvalue == 'unknown' or xyzvalue == _('unknown'):
return UnknownLanguage(xyzvalue)
for lang_id, lang_data in enumerate(LANGUAGES):
for data_value in lang_data[xyzkey]:
if xyzvalue == data_value.lower():
return cls(lang_id)
raise NotALanguageException(xyzvalue, 'Illegal language {}: {}'.format(xyzkey, xyzvalue)) | Private helper function to create new Language instance.
:param xyzkey: one of ('locale', 'ISO639', 'LanguageID', 'LanguageName')
:param xyzvalue: corresponding value of xyzkey
:return: Language instance | entailment |
def from_unknown(cls, value, xx=False, xxx=False, locale=False, name=False):
"""
Try to create a Language instance having only some limited data about the Language.
If no corresponding Language is found, a NotALanguageException is thrown.
:param value: data known about the language as string
:param xx: True if the value may be a locale
:param xxx: True if the value may be a LanguageID
:param locale: True if the value may be a locale
:param name: True if the value may be a LanguageName
:return: Language Instance if a matching Language was found
"""
# Use 2 lists instead of dict ==> order known
keys = ['ISO639', 'LanguageID', 'locale', 'LanguageName']
truefalses = [xx, xxx, locale, name]
value = value.lower()
for key, doKey in zip(keys, truefalses):
if doKey:
try:
return cls._from_xyz(key, value)
except NotALanguageException:
pass
raise NotALanguageException(value, 'Illegal language "{}"'.format(value)) | Try to create a Language instance having only some limited data about the Language.
If no corresponding Language is found, a NotALanguageException is thrown.
:param value: data known about the language as string
:param xx: True if the value may be a locale
:param xxx: True if the value may be a LanguageID
:param locale: True if the value may be a locale
:param name: True if the value may be a LanguageName
:return: Language Instance if a matching Language was found | entailment |
def from_file(cls, filepath, chunk_size=None):
"""
Try do determine the language of a text file.
:param filepath: string file path
:param chunk_size: amount of bytes of file to read to determine language
:return: Language instance if detection succeeded, otherwise return UnknownLanguage
"""
log.debug('Language.from_file: "{}", chunk={} ...'.format(filepath, chunk_size))
with filepath.open('rb') as f:
data = f.read(-1 if chunk_size is None else chunk_size)
data_ascii = asciify(data)
lang_xx = langdetect_detect(data_ascii)
lang = cls.from_xx(lang_xx)
log.debug('... result language={}'.format(lang))
return lang | Try do determine the language of a text file.
:param filepath: string file path
:param chunk_size: amount of bytes of file to read to determine language
:return: Language instance if detection succeeded, otherwise return UnknownLanguage | entailment |
def onFolderTreeClicked(self, proxyIndex):
"""What to do when a Folder in the tree is clicked"""
if not proxyIndex.isValid():
return
index = self.proxyFileModel.mapToSource(proxyIndex)
settings = QSettings()
folder_path = self.fileModel.filePath(index)
settings.setValue('mainwindow/workingDirectory', folder_path) | What to do when a Folder in the tree is clicked | entailment |
def send_to_queue(
self,
args: Tuple=(),
kwargs: Dict[str, Any]={},
host: str=None,
wait_result: Union[int, float]=None,
message_ttl: Union[int, float]=None,
) -> Any:
"""
Sends a message to the queue.
A worker will run the task's function when it receives the message.
:param args: Arguments that will be passed to task on execution.
:param kwargs: Keyword arguments that will be passed to task
on execution.
:param host: Send this task to specific host. ``host`` will be
appended to the queue name. If ``host`` is "localhost", hostname
of the server will be appended to the queue name.
:param wait_result:
Wait for result from worker for ``wait_result`` seconds.
If timeout occurs,
:class:`~kuyruk.exceptions.ResultTimeout` is raised.
If excecption occurs in worker,
:class:`~kuyruk.exceptions.RemoteException` is raised.
:param message_ttl:
If set, message will be destroyed in queue after ``message_ttl``
seconds.
:return: Result from worker if ``wait_result`` is set,
else :const:`None`.
"""
if self.kuyruk.config.EAGER:
# Run the task in current process
result = self.apply(*args, **kwargs)
return result if wait_result else None
logger.debug("Task.send_to_queue args=%r, kwargs=%r", args, kwargs)
queue = self._queue_for_host(host)
description = self._get_description(args, kwargs)
self._send_signal(signals.task_presend, args=args, kwargs=kwargs, description=description)
body = json.dumps(description)
msg = amqp.Message(body=body)
if wait_result:
# Use direct reply-to feature from RabbitMQ:
# https://www.rabbitmq.com/direct-reply-to.html
msg.properties['reply_to'] = 'amq.rabbitmq.reply-to'
if message_ttl:
msg.properties['expiration'] = str(int(message_ttl * 1000))
with self.kuyruk.channel() as ch:
if wait_result:
result = Result(ch.connection)
ch.basic_consume(queue='amq.rabbitmq.reply-to', no_ack=True, callback=result.process_message)
ch.queue_declare(queue=queue, durable=True, auto_delete=False)
ch.basic_publish(msg, exchange="", routing_key=queue)
self._send_signal(signals.task_postsend, args=args, kwargs=kwargs, description=description)
if wait_result:
return result.wait(wait_result) | Sends a message to the queue.
A worker will run the task's function when it receives the message.
:param args: Arguments that will be passed to task on execution.
:param kwargs: Keyword arguments that will be passed to task
on execution.
:param host: Send this task to specific host. ``host`` will be
appended to the queue name. If ``host`` is "localhost", hostname
of the server will be appended to the queue name.
:param wait_result:
Wait for result from worker for ``wait_result`` seconds.
If timeout occurs,
:class:`~kuyruk.exceptions.ResultTimeout` is raised.
If excecption occurs in worker,
:class:`~kuyruk.exceptions.RemoteException` is raised.
:param message_ttl:
If set, message will be destroyed in queue after ``message_ttl``
seconds.
:return: Result from worker if ``wait_result`` is set,
else :const:`None`. | entailment |
def _get_description(self, args: Tuple, kwargs: Dict[str, Any]) -> Dict[str, Any]:
"""Return the dictionary to be sent to the queue."""
return {
'id': uuid1().hex,
'args': args,
'kwargs': kwargs,
'module': self._module_name,
'function': self.f.__name__,
'sender_hostname': socket.gethostname(),
'sender_pid': os.getpid(),
'sender_cmd': ' '.join(sys.argv),
'sender_timestamp': datetime.utcnow().isoformat()[:19],
} | Return the dictionary to be sent to the queue. | entailment |
def apply(self, *args: Any, **kwargs: Any) -> Any:
"""Called by workers to run the wrapped function.
You may call it yourself if you want to run the task in current process
without sending to the queue.
If task has a `retry` property it will be retried on failure.
If task has a `max_run_time` property the task will not be allowed to
run more than that.
"""
def send_signal(sig: Signal, **extra: Any) -> None:
self._send_signal(sig, args=args, kwargs=kwargs, **extra)
logger.debug("Applying %r, args=%r, kwargs=%r", self, args, kwargs)
send_signal(signals.task_preapply)
try:
tries = 1 + self.retry
while 1:
tries -= 1
send_signal(signals.task_prerun)
try:
with time_limit(self.max_run_time or 0):
return self.f(*args, **kwargs)
except Exception:
send_signal(signals.task_error, exc_info=sys.exc_info())
if tries <= 0:
raise
else:
break
finally:
send_signal(signals.task_postrun)
except Exception:
send_signal(signals.task_failure, exc_info=sys.exc_info())
raise
else:
send_signal(signals.task_success)
finally:
send_signal(signals.task_postapply) | Called by workers to run the wrapped function.
You may call it yourself if you want to run the task in current process
without sending to the queue.
If task has a `retry` property it will be retried on failure.
If task has a `max_run_time` property the task will not be allowed to
run more than that. | entailment |
def _module_name(self) -> str:
"""Module name of the wrapped function."""
name = self.f.__module__
if name == '__main__':
return importer.main_module_name()
return name | Module name of the wrapped function. | entailment |
def i18n_install(lc=None):
"""
Install internationalization support for the clients using the specified locale.
If there is no support for the locale, the default locale will be used.
As last resort, a null translator will be installed.
:param lc: locale to install. If None, the system default locale will be used.
"""
log.debug('i18n_install( {lc} ) called.'.format(lc=lc))
if lc is None:
lc = i18n_system_locale()
if lc is None:
log.debug('i18n_install(): installing NullTranslations')
translator = gettext.NullTranslations()
else:
child_locales = i18n_support_locale(lc) # Call i18n_support_locale to log the supported locales
log.debug('i18n_install(): installing gettext.translation(domain={domain}, localedir={localedir}, '
'languages={languages}, fallback={fallback})'.format(domain=project.PROJECT_TITLE.lower(),
localedir=i18n_get_path(),
languages=child_locales,
fallback=True))
translator = gettext.translation(
domain=project.PROJECT_TITLE.lower(), localedir=str(i18n_get_path()),
languages=child_locales, fallback=True)
translator.install(names=['ngettext']) | Install internationalization support for the clients using the specified locale.
If there is no support for the locale, the default locale will be used.
As last resort, a null translator will be installed.
:param lc: locale to install. If None, the system default locale will be used. | entailment |
def i18n_system_locale():
"""
Return the system locale
:return: the system locale (as a string)
"""
log.debug('i18n_system_locale() called')
lc, encoding = locale.getlocale()
log.debug('locale.getlocale() = (lc="{lc}", encoding="{encoding}).'.format(lc=lc, encoding=encoding))
if lc is None:
lc, encoding = locale.getdefaultlocale()
log.debug('locale.getdefaultlocale() = (lc="{lc}", encoding="{encoding}).'.format(lc=lc, encoding=encoding))
return lc | Return the system locale
:return: the system locale (as a string) | entailment |
def i18n_locale_fallbacks_calculate(lc):
"""
Calculate all child locales from a locale.
e.g. for locale="pt_BR.us-ascii", returns ["pt_BR.us-ascii", "pt_BR.us", "pt_BR", "pt"]
:param lc: locale for which the child locales are needed
:return: all child locales (including the parameter lc)
"""
log.debug('i18n_locale_fallbacks_calculate( locale="{locale}" ) called'.format(locale=lc))
locales = []
lc_original = lc
while lc:
locales.append(lc)
rindex = max([lc.rfind(separator) for separator in ['@', '_', '-', '.']])
if rindex == -1:
break
lc = lc[:rindex]
log.debug('i18n_locale_fallbacks_calculate( lc="{lc}" ) = {locales}'.format(lc=lc_original, locales=locales))
return locales | Calculate all child locales from a locale.
e.g. for locale="pt_BR.us-ascii", returns ["pt_BR.us-ascii", "pt_BR.us", "pt_BR", "pt"]
:param lc: locale for which the child locales are needed
:return: all child locales (including the parameter lc) | entailment |
def i18n_support_locale(lc_parent):
"""
Find out whether lc is supported. Returns all child locales (and eventually lc) which do have support.
:param lc_parent: Locale for which we want to know the child locales that are supported
:return: list of supported locales
"""
log.debug('i18n_support_locale( locale="{locale}" ) called'.format(locale=lc_parent))
lc_childs = i18n_locale_fallbacks_calculate(lc_parent)
locales = []
locale_path = i18n_get_path()
mo_file = '{project}.mo'.format(project=project.PROJECT_TITLE.lower())
for lc in lc_childs:
lc_mo_path = locale_path / lc / 'LC_MESSAGES' / mo_file
log.debug('Locale data "{lc_mo_path}" exists? ...'.format(lc_mo_path=lc_mo_path))
if lc_mo_path.is_file():
log.debug('... Yes! "{locale_path}" contains {mo_file}.'.format(locale_path=locale_path, mo_file=mo_file))
locales.append(lc)
else:
log.debug('... No')
log.debug('i18n_support_locale( lc="{lc}" ) = {locales}'.format(lc=lc_parent, locales=locales))
return locales | Find out whether lc is supported. Returns all child locales (and eventually lc) which do have support.
:param lc_parent: Locale for which we want to know the child locales that are supported
:return: list of supported locales | entailment |
def i18n_get_path():
"""
Get path to the internationalization data.
:return: path as a string
"""
local_locale_path = client_get_path() / 'locale'
if platform.system() == 'Linux':
if local_locale_path.exists():
return local_locale_path
else:
return Path('/usr/share/locale')
else:
return local_locale_path | Get path to the internationalization data.
:return: path as a string | entailment |
def i18n_get_supported_locales():
"""
List all locales that have internationalization data for this program
:return: List of locales
"""
locale_path = i18n_get_path()
log.debug('Scanning translation files .mo in locale path: {}'.format(locale_path))
langs = []
mo_file = '{project}.mo'.format(project=project.PROJECT_TITLE.lower())
for lc in locale_path.iterdir():
lc_mo_path = lc / 'LC_MESSAGES' / mo_file
if lc_mo_path.exists():
langs.append(lc.name)
log.debug('Detected: {langs}'.format(langs=langs))
return langs | List all locales that have internationalization data for this program
:return: List of locales | entailment |
def from_object(self, obj: Union[str, Any]) -> None:
"""Load values from an object."""
if isinstance(obj, str):
obj = importer.import_object_str(obj)
for key in dir(obj):
if key.isupper():
value = getattr(obj, key)
self._setattr(key, value)
logger.info("Config is loaded from object: %r", obj) | Load values from an object. | entailment |
def from_dict(self, d: Dict[str, Any]) -> None:
"""Load values from a dict."""
for key, value in d.items():
if key.isupper():
self._setattr(key, value)
logger.info("Config is loaded from dict: %r", d) | Load values from a dict. | entailment |
def from_pyfile(self, filename: str) -> None:
"""Load values from a Python file."""
globals_ = {} # type: Dict[str, Any]
locals_ = {} # type: Dict[str, Any]
with open(filename, "rb") as f:
exec(compile(f.read(), filename, 'exec'), globals_, locals_)
for key, value in locals_.items():
if (key.isupper() and not isinstance(value, types.ModuleType)):
self._setattr(key, value)
logger.info("Config is loaded from file: %s", filename) | Load values from a Python file. | entailment |
def from_env_vars(self) -> None:
"""Load values from environment variables.
Keys must start with `KUYRUK_`."""
for key, value in os.environ.items():
if key.startswith('KUYRUK_'):
key = key[7:]
if hasattr(Config, key):
try:
value = ast.literal_eval(value)
except (ValueError, SyntaxError):
pass
self._setattr(key, value) | Load values from environment variables.
Keys must start with `KUYRUK_`. | entailment |
def option(current_kwargs, **kwargs):
"""
Context manager for temporarily setting a keyword argument and
then restoring it to whatever it was before.
"""
tmp_kwargs = dict((key, current_kwargs.get(key)) for key, value in kwargs.items())
current_kwargs.update(kwargs)
yield
current_kwargs.update(tmp_kwargs) | Context manager for temporarily setting a keyword argument and
then restoring it to whatever it was before. | entailment |
def is_method_call(node, method_name):
"""
Returns True if `node` is a method call for `method_name`. `method_name`
can be either a string or an iterable of strings.
"""
if not isinstance(node, nodes.Call):
return False
if isinstance(node.node, nodes.Getattr):
# e.g. foo.bar()
method = node.node.attr
elif isinstance(node.node, nodes.Name):
# e.g. bar()
method = node.node.name
elif isinstance(node.node, nodes.Getitem):
# e.g. foo["bar"]()
method = node.node.arg.value
else:
return False
if isinstance(method_name, (list, tuple)):
return method in method_name
return method == method_name | Returns True if `node` is a method call for `method_name`. `method_name`
can be either a string or an iterable of strings. | entailment |
def is_loop_helper(node):
"""
Returns True is node is a loop helper e.g. {{ loop.index }} or {{ loop.first }}
"""
return hasattr(node, 'node') and isinstance(node.node, nodes.Name) and node.node.name == 'loop' | Returns True is node is a loop helper e.g. {{ loop.index }} or {{ loop.first }} | entailment |
def get_output(self):
"""
Returns the generated JavaScript code.
Returns:
str
"""
# generate the JS function string
template_function = TEMPLATE_WRAPPER.format(
function_name=self.js_function_name,
template_code=self.output.getvalue()
).strip()
# get the correct module format template
module_format = JS_MODULE_FORMATS[self.js_module_format]
# generate the module code
return module_format(self.dependencies, template_function) | Returns the generated JavaScript code.
Returns:
str | entailment |
def _get_depencency_var_name(self, dependency):
"""
Returns the variable name assigned to the given dependency or None if the dependency has
not yet been registered.
Args:
dependency (str): Thet dependency that needs to be imported.
Returns:
str or None
"""
for dep_path, var_name in self.dependencies:
if dep_path == dependency:
return var_name | Returns the variable name assigned to the given dependency or None if the dependency has
not yet been registered.
Args:
dependency (str): Thet dependency that needs to be imported.
Returns:
str or None | entailment |
def _add_dependency(self, dependency, var_name=None):
"""
Adds the given dependency and returns the variable name to use to access it. If `var_name`
is not given then a random one will be created.
Args:
dependency (str):
var_name (str, optional):
Returns:
str
"""
if var_name is None:
var_name = next(self.temp_var_names)
# Don't add duplicate dependencies
if (dependency, var_name) not in self.dependencies:
self.dependencies.append((dependency, var_name))
return var_name | Adds the given dependency and returns the variable name to use to access it. If `var_name`
is not given then a random one will be created.
Args:
dependency (str):
var_name (str, optional):
Returns:
str | entailment |
def _process_extends(self, node, **kwargs):
"""
Processes an extends block e.g. `{% extends "some/template.jinja" %}`
"""
# find all the blocks in this template
for b in self.ast.find_all(nodes.Block):
# if not already in `child_blocks` then this is the first time a
# block with this name has been encountered.
if b.name not in self.child_blocks:
self.child_blocks[b.name] = b
else:
# otherwise we have seen this block before, so we need to find the last
# super_block and add the block from this template to the end.
block = self.child_blocks.get(b.name)
while hasattr(block, 'super_block'):
block = block.super_block
block.super_block = b
# load the parent template
parent_template = JinjaToJS(template_root=self.template_root,
template_name=node.template.value,
js_module_format=self.js_module_format,
runtime_path=self.runtime_path,
include_prefix=self.include_prefix,
include_ext=self.include_ext,
child_blocks=self.child_blocks,
dependencies=self.dependencies)
# add the parent templates output to the current output
self.output.write(parent_template.output.getvalue())
# Raise an exception so we stop parsing this template
raise ExtendsException | Processes an extends block e.g. `{% extends "some/template.jinja" %}` | entailment |
def _process_block(self, node, **kwargs):
"""
Processes a block e.g. `{% block my_block %}{% endblock %}`
"""
# check if this node already has a 'super_block' attribute
if not hasattr(node, 'super_block'):
# since it doesn't it must be the last block in the inheritance chain
node.super_block = None
# see if there has been a child block defined - if there is this
# will be the first block in the inheritance chain
child_block = self.child_blocks.get(node.name)
if child_block:
# we have child nodes so we need to set `node` as the
# super of the last one in the chain
last_block = child_block
while hasattr(last_block, 'super_block'):
last_block = child_block.super_block
# once we have found it, set this node as it's super block
last_block.super_block = node
# this is the node we want to process as it's the first in the inheritance chain
node = child_block
# process the block passing the it's super along, if this block
# calls super() it will be handled by `_process_call`
for n in node.body:
self._process_node(n, super_block=node.super_block, **kwargs) | Processes a block e.g. `{% block my_block %}{% endblock %}` | entailment |
def _process_output(self, node, **kwargs):
"""
Processes an output node, which will contain things like `Name` and `TemplateData` nodes.
"""
for n in node.nodes:
self._process_node(n, **kwargs) | Processes an output node, which will contain things like `Name` and `TemplateData` nodes. | entailment |
def _process_templatedata(self, node, **_):
"""
Processes a `TemplateData` node, this is just a bit of as-is text
to be written to the output.
"""
# escape double quotes
value = re.sub('"', r'\\"', node.data)
# escape new lines
value = re.sub('\n', r'\\n', value)
# append value to the result
self.output.write('__result += "' + value + '";') | Processes a `TemplateData` node, this is just a bit of as-is text
to be written to the output. | entailment |
def _process_name(self, node, **kwargs):
"""
Processes a `Name` node. Some examples of `Name` nodes:
{{ foo }} -> 'foo' is a Name
{% if foo }} -> 'foo' is a Name
"""
with self._interpolation():
with self._python_bool_wrapper(**kwargs):
if node.name not in self.stored_names and node.ctx != 'store':
self.output.write(self.context_name)
self.output.write('.')
if node.ctx == 'store':
self.stored_names.add(node.name)
self.output.write(node.name) | Processes a `Name` node. Some examples of `Name` nodes:
{{ foo }} -> 'foo' is a Name
{% if foo }} -> 'foo' is a Name | entailment |
def _process_getattr(self, node, **kwargs):
"""
Processes a `GetAttr` node. e.g. {{ foo.bar }}
"""
with self._interpolation():
with self._python_bool_wrapper(**kwargs) as new_kwargs:
if is_loop_helper(node):
self._process_loop_helper(node, **new_kwargs)
else:
self._process_node(node.node, **new_kwargs)
self.output.write('.')
self.output.write(node.attr) | Processes a `GetAttr` node. e.g. {{ foo.bar }} | entailment |
def _process_getitem(self, node, **kwargs):
"""
Processes a `GetItem` node e.g. {{ foo["bar"] }}
"""
with self._interpolation():
with self._python_bool_wrapper(**kwargs) as new_kwargs:
self._process_node(node.node, **new_kwargs)
if isinstance(node.arg, nodes.Slice):
self.output.write('.slice(')
if node.arg.step is not None:
raise Exception('The step argument is not supported when slicing.')
if node.arg.start is None:
self.output.write('0')
else:
self._process_node(node.arg.start, **new_kwargs)
if node.arg.stop is None:
self.output.write(')')
else:
self.output.write(',')
self._process_node(node.arg.stop, **new_kwargs)
self.output.write(')')
else:
self.output.write('[')
self._process_node(node.arg, **new_kwargs)
self.output.write(']') | Processes a `GetItem` node e.g. {{ foo["bar"] }} | entailment |
def _process_for(self, node, **kwargs):
"""
Processes a for loop. e.g.
{% for number in numbers %}
{{ number }}
{% endfor %}
{% for key, value in somemap.items() %}
{{ key }} -> {{ value }}
{% %}
"""
# since a for loop can introduce new names into the context
# we need to remember the ones that existed outside the loop
previous_stored_names = self.stored_names.copy()
with self._execution():
self.output.write('__runtime.each(')
if is_method_call(node.iter, dict.keys.__name__):
self.output.write('Object.keys(')
self._process_node(node.iter, **kwargs)
if is_method_call(node.iter, dict.keys.__name__):
self.output.write(')')
self.output.write(',')
self.output.write('function')
self.output.write('(')
# javascript iterations put the value first, then the key
if isinstance(node.target, nodes.Tuple):
if len(node.target.items) > 2:
raise Exception('De-structuring more than 2 items is not supported.')
for i, item in enumerate(reversed(node.target.items)):
self._process_node(item, **kwargs)
if i < len(node.target.items) - 1:
self.output.write(',')
else:
self._process_node(node.target, **kwargs)
self.output.write(')')
self.output.write('{')
if node.test:
self.output.write('if (!(')
self._process_node(node.test, **kwargs)
self.output.write(')) { return; }')
assigns = node.target.items if isinstance(node.target, nodes.Tuple) else [node.target]
with self._scoped_variables(assigns, **kwargs):
for n in node.body:
self._process_node(n, **kwargs)
with self._execution():
self.output.write('}')
self.output.write(')')
self.output.write(';')
# restore the stored names
self.stored_names = previous_stored_names | Processes a for loop. e.g.
{% for number in numbers %}
{{ number }}
{% endfor %}
{% for key, value in somemap.items() %}
{{ key }} -> {{ value }}
{% %} | entailment |
def _process_if(self, node, execute_end=None, **kwargs):
"""
Processes an if block e.g. `{% if foo %} do something {% endif %}`
"""
with self._execution():
self.output.write('if')
self.output.write('(')
with option(kwargs, use_python_bool_wrapper=True):
self._process_node(node.test, **kwargs)
self.output.write(')')
self.output.write('{')
# We accept an `execute_end` function as a keyword argument as this function is
# recursive in the case of something like if-elif-elif-else. In these cases this
# invocation of this function may have to close execution opened by a previous
# invocation of this function.
if execute_end:
execute_end()
# body
for n in node.body:
self._process_node(n, **kwargs)
if not node.else_ and not node.elif_:
# no else - just close the if
with self._execution():
self.output.write('}')
else:
# either an else or an elif
with self._execution() as execute_end:
self.output.write('}')
self.output.write(' else ')
# check for elif
for n in node.elif_:
self._process_node(n, execute_end=execute_end, **kwargs)
if node.elif_ and node.else_:
self.output.write(' else ')
# open up the body
self.output.write('{')
# process the body of the else
for n in node.else_:
self._process_node(n, **kwargs)
# close the body
with self._execution():
self.output.write('}') | Processes an if block e.g. `{% if foo %} do something {% endif %}` | entailment |
def _process_math(self, node, math_operator=None, function=None, **kwargs):
"""
Processes a math node e.g. `Div`, `Sub`, `Add`, `Mul` etc...
If `function` is provided the expression is wrapped in a call to that function.
"""
with self._interpolation():
if function:
self.output.write(function)
self.output.write('(')
self._process_node(node.left, **kwargs)
self.output.write(math_operator)
self._process_node(node.right, **kwargs)
if function:
self.output.write(')') | Processes a math node e.g. `Div`, `Sub`, `Add`, `Mul` etc...
If `function` is provided the expression is wrapped in a call to that function. | entailment |
def _process_loop_helper(self, node, **kwargs):
"""
Processes a loop helper e.g. {{ loop.first }} or {{ loop.index }}
"""
if node.attr == LOOP_HELPER_INDEX:
self.output.write('(arguments[1] + 1)')
elif node.attr == LOOP_HELPER_INDEX_0:
self.output.write('arguments[1]')
elif node.attr == LOOP_HELPER_FIRST:
self.output.write('(arguments[1] == 0)')
elif node.attr == LOOP_HELPER_LAST:
self.output.write('(arguments[1] == arguments[2].length - 1)')
elif node.attr == LOOP_HELPER_LENGTH:
self.output.write('arguments[2].length') | Processes a loop helper e.g. {{ loop.first }} or {{ loop.index }} | entailment |
def _execution(self):
"""
Context manager for executing some JavaScript inside a template.
"""
did_start_executing = False
if self.state == STATE_DEFAULT:
did_start_executing = True
self.state = STATE_EXECUTING
def close():
if did_start_executing and self.state == STATE_EXECUTING:
self.state = STATE_DEFAULT
yield close
close() | Context manager for executing some JavaScript inside a template. | entailment |
def _scoped_variables(self, nodes_list, **kwargs):
"""
Context manager for creating scoped variables defined by the nodes in `nodes_list`.
These variables will be added to the context, and when the context manager exits the
context object will be restored to it's previous state.
"""
tmp_vars = []
for node in nodes_list:
is_assign_node = isinstance(node, nodes.Assign)
name = node.target.name if is_assign_node else node.name
# create a temp variable name
tmp_var = next(self.temp_var_names)
# save previous context value
with self._execution():
# save the current value of this name
self.output.write('var %s = %s.%s;' % (tmp_var, self.context_name, name))
# add new value to context
self.output.write('%s.%s = ' % (self.context_name, name))
if is_assign_node:
self._process_node(node.node, **kwargs)
else:
self.output.write(node.name)
self.output.write(';')
tmp_vars.append((tmp_var, name))
yield
# restore context
for tmp_var, name in tmp_vars:
with self._execution():
self.output.write('%s.%s = %s;' % (self.context_name, name, tmp_var)) | Context manager for creating scoped variables defined by the nodes in `nodes_list`.
These variables will be added to the context, and when the context manager exits the
context object will be restored to it's previous state. | entailment |
def download_raw(url, local_path, callback):
"""
Download an url to a local file.
:param url: url of the file to download
:param local_path: path where the downloaded file should be saved
:param callback: instance of ProgressCallback
:return: True is succeeded
"""
log.debug('download_raw(url={url}, local_path={local_path})'.format(url=url, local_path=local_path))
raw_progress = RawDownloadProgress(callback)
reporthook = raw_progress.get_report_hook()
try:
log.debug('urlretrieve(url={url}, local_path={local_path}) ...'.format(url=url, local_path=local_path))
urlretrieve(url=url, filename=local_path, reporthook=reporthook)
log.debug('... SUCCEEDED')
callback.finish(True)
return True
except URLError:
log.exception('... FAILED')
callback.finish(False)
return False | Download an url to a local file.
:param url: url of the file to download
:param local_path: path where the downloaded file should be saved
:param callback: instance of ProgressCallback
:return: True is succeeded | entailment |
def get_report_hook(self):
"""
Return a callback function suitable for using reporthook argument of urllib(.request).urlretrieve
:return: function object
"""
def report_hook(chunkNumber, chunkSize, totalSize):
if totalSize != -1 and not self._callback.range_initialized():
log.debug('Initializing range: [{},{}]'.format(0, totalSize))
self._callback.set_range(0, totalSize)
self._chunkNumber = chunkNumber
self._total += chunkSize
if self._total > totalSize:
# The chunk size can be bigger than the file
self._total = totalSize
self._callback.update(self._total)
return report_hook | Return a callback function suitable for using reporthook argument of urllib(.request).urlretrieve
:return: function object | entailment |
def window_iterator(data, width):
"""
Instead of iterating element by element, get a number of elements at each iteration step.
:param data: data to iterate on
:param width: maximum number of elements to get in each iteration step
:return:
"""
start = 0
while start < len(data):
yield data[start:start+width]
start += width | Instead of iterating element by element, get a number of elements at each iteration step.
:param data: data to iterate on
:param width: maximum number of elements to get in each iteration step
:return: | entailment |
def set_range(self, minimum, maximum):
"""
Set a range.
The range is passed unchanged to the rangeChanged member function.
:param minimum: minimum value of the range (None if no percentage is required)
:param maximum: maximum value of the range (None if no percentage is required)
"""
self._min = minimum
self._max = maximum
self.on_rangeChange(minimum, maximum) | Set a range.
The range is passed unchanged to the rangeChanged member function.
:param minimum: minimum value of the range (None if no percentage is required)
:param maximum: maximum value of the range (None if no percentage is required) | entailment |
def get_child_progress(self, parent_min, parent_max):
"""
Create a new child ProgressCallback.
Minimum and maximum values of the child are mapped to parent_min and parent_max of this parent ProgressCallback.
:param parent_min: minimum value of the child is mapped to parent_min of this parent ProgressCallback
:param parent_max: maximum value of the child is mapped to parent_max of this parent ProgressCallback
:return: instance of SubProgressCallback
"""
return SubProgressCallback(parent=self, parent_min=parent_min, parent_max=parent_max) | Create a new child ProgressCallback.
Minimum and maximum values of the child are mapped to parent_min and parent_max of this parent ProgressCallback.
:param parent_min: minimum value of the child is mapped to parent_min of this parent ProgressCallback
:param parent_max: maximum value of the child is mapped to parent_max of this parent ProgressCallback
:return: instance of SubProgressCallback | entailment |
def update(self, value, *args, **kwargs):
"""
Call this function to inform that an update is available.
This function does NOT call finish when value == maximum.
:param value: The current index/position of the action. (Should be, but must not be, in the range [min, max])
:param args: extra positional arguments to pass on
:param kwargs: extra keyword arguments to pass on
"""
log.debug('update(value={value}, args={args}, kwargs={kwargs})'.format(value=value, args=args, kwargs=kwargs))
self.on_update(value, *args, **kwargs) | Call this function to inform that an update is available.
This function does NOT call finish when value == maximum.
:param value: The current index/position of the action. (Should be, but must not be, in the range [min, max])
:param args: extra positional arguments to pass on
:param kwargs: extra keyword arguments to pass on | entailment |
def finish(self, *args, **kwargs):
"""
Call this function to inform that the operation is finished.
:param args: extra positional arguments to pass on
:param kwargs: extra keyword arguments to pass on
"""
log.debug('finish(args={args}, kwargs={kwargs})'.format(args=args, kwargs=kwargs))
self.on_finish(*args, **kwargs) | Call this function to inform that the operation is finished.
:param args: extra positional arguments to pass on
:param kwargs: extra keyword arguments to pass on | entailment |
def on_update(self, value, *args, **kwargs):
"""
Inform the parent of progress.
:param value: The value of this subprogresscallback
:param args: Extra positional arguments
:param kwargs: Extra keyword arguments
"""
parent_value = self._parent_min
if self._max != self._min:
sub_progress = (value - self._min) / (self._max - self._min)
parent_value = self._parent_min + sub_progress * (self._parent_max - self._parent_min)
self._parent.update(parent_value, *args, **kwargs) | Inform the parent of progress.
:param value: The value of this subprogresscallback
:param args: Extra positional arguments
:param kwargs: Extra keyword arguments | entailment |
def _login(self, username="", password=""):
"""Login to the Server using username/password,
empty parameters means an anonymously login
Returns True if login sucessful, and False if not.
"""
self.log.debug("----------------")
self.log.debug("Logging in (username: %s)..." % username)
def run_query():
return self._xmlrpc_server.LogIn(
username, password, self.language, self.user_agent)
info = self._safe_exec(run_query, None)
if info is None:
self._token = None
return False
self.log.debug("Login ended in %s with status: %s" %
(info['seconds'], info['status']))
if info['status'] == "200 OK":
self.log.debug("Session ID: %s" % info['token'])
self.log.debug("----------------")
self._token = info['token']
return True
else:
# force token reset
self.log.debug("----------------")
self._token = None
return False | Login to the Server using username/password,
empty parameters means an anonymously login
Returns True if login sucessful, and False if not. | entailment |
def _logout(self):
"""Logout from current session(token)
This functions doesn't return any boolean value, since it can 'fail' for anonymous logins
"""
self.log.debug("Logging out from session ID: %s" % self._token)
try:
info = self._xmlrpc_server.LogOut(self._token)
self.log.debug("Logout ended in %s with status: %s" %
(info['seconds'], info['status']))
except ProtocolError as e:
self.log.debug("error in HTTP/HTTPS transport layer")
raise
except Fault as e:
self.log.debug("error in xml-rpc server")
raise
except:
self.log.exception("Connection to the server failed/other error")
raise
finally:
# force token reset
self._token = None | Logout from current session(token)
This functions doesn't return any boolean value, since it can 'fail' for anonymous logins | entailment |
def parse_arguments(args=None):
"""
Parse the program arguments.
:return: argparse.Namespace object with the parsed arguments
"""
parser = get_argument_parser()
# Autocomplete arguments
autocomplete(parser)
ns = parser.parse_args(args=args)
return ArgumentSettings(
program=ArgumentProgramSettings(
log=ArgumentLogSettings(
path=None,
level=ns.loglevel,
),
settings=ArgumentSettingsSettings(
path=ns.settings_path,
),
client=ArgumentClientSettings(
type=ns.client_type,
cli=ArgumentClientCliSettings(
interactive=False,
),
gui=ArgumentClientGuiSettings(
),
),
),
search=ArgumentSearchSettings(
recursive=ns.recursive,
working_directory=ns.video_path,
),
filter=FilterSettings(
languages=ns.languages,
),
download=DownloadSettings(
rename_strategy=ns.rename_strategy,
),
providers=ns.providers,
proxy=ns.proxy,
test=ns.test,
) | Parse the program arguments.
:return: argparse.Namespace object with the parsed arguments | entailment |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.