content (stringlengths 35–762k) | sha1 (stringlengths 40–40) | id (int64 0–3.66M) |
---|---|---|
def distr_selectbox_names():
"""
Accessing stats.name.
"""
names = ['alpha',
'anglit',
'arcsine',
'argus',
'beta',
'betaprime',
'bradford',
'burr',
'burr12',
'cauchy',
'chi',
'chi2',
'cosine',
'crystalball',
'dgamma',
'dweibull',
'erlang',
'expon',
'exponnorm',
'exponpow',
'exponweib',
'f',
'fatiguelife',
'fisk',
'foldcauchy',
'foldnorm',
'gamma',
'gausshyper',
'genexpon',
'genextreme',
'gengamma',
'genhalflogistic',
'geninvgauss',
'genlogistic',
'gennorm',
'genpareto',
'gilbrat',
'gompertz',
'gumbel_l',
'gumbel_r',
'halfcauchy',
'halfgennorm',
'halflogistic',
'halfnorm',
'hypsecant',
'invgamma',
'invgauss',
'invweibull',
'johnsonsb',
'johnsonsu',
'kappa3',
'kappa4',
'ksone',
'kstwo',
'kstwobign',
'laplace',
'laplace_asymmetric',
'levy',
'levy_l',
'levy_stable',
'loggamma',
'logistic',
'loglaplace',
'lognorm',
'loguniform',
'lomax',
'maxwell',
'mielke',
'moyal',
'nakagami',
'ncf',
'nct',
'ncx2',
'norm',
'norminvgauss',
'pareto',
'pearson3',
'powerlaw',
'powerlognorm',
'powernorm',
'rayleigh',
'rdist',
'recipinvgauss',
'reciprocal',
'rice',
'semicircular',
'skewnorm',
't',
'trapezoid',
'triang',
'truncexpon',
'truncnorm',
'tukeylambda',
'uniform',
'vonmises',
'vonmises_line',
'wald',
'weibull_max',
'weibull_min',
'wrapcauchy']
return names
|
5051cab27bf6497d3dfb4d4828daeaeefa528403
| 35,376 |
def read_file(filename):
"""Read filename; return contents as one string."""
with open(filename) as my_file:
return my_file.read()
|
fa4b47085f5d3ace5c011fcda27e6ffa94c7085a
| 35,377 |
import numpy as np
def remove_intercept_column(X: np.ndarray) -> np.ndarray:
    """
    Remove the first (intercept) column from a 1-D or 2-D array.
    """
if len(X.shape) == 1:
return X[1:]
return X[:, 1:]
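# Usage sketch (illustrative): dropping the leading intercept column
# from a 2-D design matrix.
X_demo = np.array([[1.0, 2.0, 3.0],
                   [1.0, 4.0, 5.0]])
print(remove_intercept_column(X_demo))  # [[2. 3.] [4. 5.]]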
|
40b8bccab207e3293cca69ed99f641dbe4a17198
| 35,378 |
from typing import Tuple
def get_user_configurations() -> Tuple[list, list]:
"""
Function that reads the $HOME/.pip folder and return two lists,
containing filenames and absolute path from them.
"""
pip_config_path = UserPath.PIP_CONFIG_DIRECTORY.value
config_filenames = read.get_user_configuration_files()
config_filepaths = [pip_config_path + "/" + filename for filename in config_filenames]
return config_filenames, config_filepaths
|
422f34ed175a31b3b3070154ecb0c38cd1ca19d9
| 35,379 |
def say(number):
"""
print out a number as words in North American English using short scale terms
"""
number = int(number)
if number < 0 or number >= 1e12:
raise ValueError
if number == 0:
return "zero"
def quotient_and_remainder(number, divisor):
"""
return the integer quotient and remainder of dividing number by divisor
"""
divisor = int(divisor)
remainder = number % divisor
quotient = (number - remainder) // divisor
return quotient, remainder
def say_term(which, terms):
"""
return a term from a tuple of strings as a list of one element
"""
return terms[which : which + 1]
def say_tens(number):
"""
return a string representing a number less than 100 in English
"""
terms = []
quotient, remainder = quotient_and_remainder(number, 10)
if quotient == 1:
terms += say_term(remainder,
("ten", "eleven", "twelve", "thirteen", "fourteen",
"fifteen", "sixteen", "seventeen", "eighteen", "nineteen"))
else:
if quotient:
terms += say_term(quotient,
("units", "teens", "twenty", "thirty", "forty",
"fifty", "sixty", "seventy", "eighty", "ninety"))
if remainder:
terms += say_term(remainder,
("zero", "one", "two", "three", "four",
"five", "six", "seven", "eight", "nine"))
return '-'.join(terms)
def say_hundreds(number, final=False):
"""
return a string representing a number less than 1000 in English
"""
terms = []
quotient, remainder = quotient_and_remainder(number, 100)
if quotient:
terms += [say_tens(quotient), "hundred"]
if remainder:
if quotient or final:
terms += ["and"]
terms += [say_tens(remainder)]
return terms
# now finally convert a number less than a million million
terms = []
quotient, remainder = quotient_and_remainder(number, 1e9)
if quotient:
terms += say_hundreds(quotient) + ["billion"]
quotient, remainder = quotient_and_remainder(remainder, 1e6)
if quotient:
terms += say_hundreds(quotient) + ["million"]
quotient, remainder = quotient_and_remainder(remainder, 1e3)
if quotient:
terms += say_hundreds(quotient) + ["thousand"]
if remainder:
terms += say_hundreds(remainder, terms != [])
return ' '.join(terms)
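# Usage sketch (illustrative):
print(say(1234))  # one thousand two hundred and thirty-four
print(say(0))     # zero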
|
42b8d321c001c60e37f6bbd94bd2a3404ddf5c66
| 35,381 |
def get_s3_versions(bucket_name, key_name):
"""Get versioning information for a given key.
:param bucket_name: the bucket's name
:type bucket_name: string
:param key_name: the key's name
:type key_name: string
:return: for each version, the version id and the last modified date
:rtype: a list of tuples, where tuple[0] is a string and tuple[1] a
`datetime` instance.
**NB:** it assumes a versioned bucket.
"""
client = get_s3_resource()
# may be worth comparing with
# client.list_object_versions(prefix)
versions = client.Bucket(bucket_name).\
object_versions.filter(Prefix=key_name)
    version_ids = []
    for v in versions:
        if v.size is None:
            continue
        # call get() once per version: each call issues a separate S3 request
        obj = v.get()
        version_ids.append((obj.get('VersionId'), obj.get('LastModified')))
return version_ids
|
85fc1f51f69e4cc326f043be7c65d48df3343854
| 35,382 |
def indentitems(items, indent, level):
"""Recursively traverses the list of json lines, adds indentation based on the current depth"""
res = ""
indentstr = " " * (indent * level)
for (i, item) in enumerate(items):
if isinstance(item, list):
res += indentitems(item, indent, level+1)
else:
islast = (i==len(items)-1)
# no new line character after the last rendered line
if level==0 and islast:
res += indentstr + item
else:
res += indentstr + item + "\n"
return res
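# Usage sketch (illustrative): nested lists of pre-rendered JSON lines
# become a single indented string.
print(indentitems(['{', ['"a": 1'], '}'], indent=2, level=0))
# {
#   "a": 1
# }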
|
91adea46ab0cda227167869235e5e54311ab199a
| 35,383 |
import numpy as np
def dcg_at_k(r, k):
"""
Args:
r: Relevance scores (list or numpy) in rank order
(first element is the first item)
k: Number of results to consider
Returns:
Discounted cumulative gain
"""
assert k >= 1
r = np.asfarray(r)[:k] != 0
if r.size:
return np.sum(
np.subtract(np.power(2, r), 1) / np.log2(np.arange(2, r.size + 2))
)
return 0.0
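# Usage sketch (illustrative). Note that the `!= 0` above binarizes
# relevance, so graded scores collapse to 0/1.
print(dcg_at_k([3, 2, 0, 1], k=3))  # 1 + 1/log2(3) + 0 ~= 1.6309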
|
18b862b819170fb0c8049f57fe2c1448fc277260
| 35,384 |
def switch(parser, token):
"""
Switch tag. Usage::
{% switch meal %}
{% case "spam" %}...{% endcase %}
{% case "eggs" %}...{% endcase %}
{% endswitch %}
    Note that ``{% case %}`` arguments can be variables if you like (as can
    switch arguments, but that's a bit silly).
"""
# Parse out the arguments.
args = token.split_contents()
if len(args) != 2:
        raise template.TemplateSyntaxError("%s tag takes exactly 2 arguments." % args[0])
# Pull out all the children of the switch tag (until {% endswitch %}).
childnodes = parser.parse(("endswitch",))
# Remove the {% endswitch %} node so it doesn't get parsed twice.
parser.delete_first_token()
# We just care about case children; all other direct children get ignored.
casenodes = childnodes.get_nodes_by_type(CaseNode)
return SwitchNode(args[1], casenodes)
|
f4522eaacbca83e17a604c57026a07abeea42fb7
| 35,385 |
import re
def rm_noise(diff):
"""Filter out noise from diff text.
Args:
diff (str): diff text
Returns:
str: cleaned diff text
"""
result = diff
patterns = ["\n", "\u0020+", "་+?"]
for pattern in patterns:
noise = re.search(pattern, diff)
if noise:
result = result.replace(noise[0], "")
return result
|
8a139f22e30e3c98b1dfef3b47fa623db8b22a29
| 35,386 |
async def auth_relogin(sessionid: str = Form(...),
clients: ClientStorage = Depends(get_clients)) -> str:
"""Relogin by username and password (with clean cookies)
"""
cl = clients.get(sessionid)
result = cl.relogin()
return result
|
829981336379e157f164beadffdbc5c576e095a1
| 35,387 |
def delete_qsession_command(session_id: str, cloud_request_id: str) -> dict:
"""
Delete a queued RTR session command by session ID and cloud request ID
    :param session_id: the RTR session ID
    :param cloud_request_id: the cloud request ID of the queued command
"""
endpoint_url = '/real-time-response/entities/queued-sessions/command/v1'
if helpers.is_expiring(BATCH_LIFE_TIME, BATCH_REQ_TIME):
refresh_rtr_session()
params = {
'session_id': session_id,
'cloud_request_id': cloud_request_id
}
response = http_request('DELETE', endpoint_url, params=params)
return response
|
dd2bab4531d3a0523022e46245b6d41ef202684d
| 35,388 |
def dict_factory(cursor, row):
""" convert sursor into dict """
result = {}
for idx, col in enumerate(cursor.description):
result[col[0]] = row[idx]
return result
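# Usage sketch (illustrative), wiring this up as a sqlite3 row factory:
import sqlite3
conn = sqlite3.connect(':memory:')
conn.row_factory = dict_factory
print(conn.execute('SELECT 1 AS one, 2 AS two').fetchone())  # {'one': 1, 'two': 2}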
|
9de5c6252cb36961c645c9b43bd5f7a8a66b4deb
| 35,389 |
def variant_with_no_attributes(category):
"""Create a variant having no attributes, the same for the parent product."""
product_type = ProductType.objects.create(
name="Test product type", has_variants=True, is_shipping_required=True
)
product = Product.objects.create(
name="Test product",
price=Money(10, "USD"),
product_type=product_type,
category=category,
)
variant = ProductVariant.objects.create(product=product, sku="123")
return variant
|
9bf4f09456d00f638e99cc2b6218f54de627f193
| 35,390 |
import numpy as np
def spec_resid(pars,wave,flux,err,models,spec):
"""
This helper function calculates the residuals between an observed spectrum and a Cannon model spectrum.
Parameters
----------
pars : array
Input parameters [teff, logg, feh, rv].
wave : array
Wavelength array for observed spectrum.
flux : array
Observed flux array.
err : array
Uncertainties in the observed flux.
models : list of Cannon models
List of Cannon models to use
spec : Spec1D
The observed spectrum. Needed to run cannon.model_spectrum().
    Returns
-------
resid : array
Array of residuals between the observed flux array and the Cannon model spectrum.
"""
#m = cannon.model_spectrum(models,spec,teff=pars[0],logg=pars[1],feh=pars[2],rv=pars[3])
m = models(teff=pars[0],logg=pars[1],feh=pars[2],rv=pars[3])
if m is None:
return np.repeat(1e30,len(flux))
resid = (flux-m.flux.flatten())/err
return resid
|
d8ea49975717693ec8b6b4003e035e04a847c9b4
| 35,391 |
import calendar
def timestamp_d_b_Y_H_M_S(value):
"""Convert timestamp string to time in seconds since epoch.
    Timestamp strings like '18 Jun 2013 12:00:00 GMT' are able to be converted
    by this function.
Args:
value: A timestamp string in the format '%d %b %Y %H:%M:%S GMT'.
Returns:
The time in seconds since epoch as an integer.
Raises:
ValueError: If timestamp is invalid.
        KeyError: If the abbreviated month is invalid.
    Note: The timezone is ignored; it is simply assumed to be UTC/GMT.
"""
d, b, Y, t, Z = value.split()
H, M, S = t.split(":")
return int(calendar.timegm((
int(Y), _months[b.lower()], int(d), int(H), int(M), int(S), 0, 0, 0
)))
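# Usage sketch (illustrative). `_months` is not defined in this snippet;
# a plausible mapping from abbreviated month names to 1-12 is assumed here:
_months = {m: i for i, m in enumerate(
    ['jan', 'feb', 'mar', 'apr', 'may', 'jun',
     'jul', 'aug', 'sep', 'oct', 'nov', 'dec'], start=1)}
print(timestamp_d_b_Y_H_M_S('18 Jun 2013 12:00:00 GMT'))  # 1371556800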
|
20484ba19cf54c4b152763a5170568f18d0ca492
| 35,393 |
def constrained_fit(
model_constructor,
pdf_transform=False,
default_rtol=1e-10,
default_atol=1e-10,
default_max_iter=int(1e7),
learning_rate=1e-6,
):
"""
Wraps a series of functions that perform maximum likelihood fitting in the
`two_phase_solver` method found in the `fax` python module. This allows for
the calculation of gradients of the best-fit parameters with respect to upstream
parameters that control the underlying model, i.e. the event yields (which are
then parameterized by weights or similar).
Args:
model_constructor: Function that takes in the parameters of the observable,
and returns a model object (and background-only parameters)
Returns:
constrained_fitter: Callable function that performs constrained fits.
Differentiable :)
"""
adam_init, adam_update, adam_get_params = optimizers.adam(learning_rate)
def make_model(hyper_pars):
model_pars, constrained_mu = hyper_pars
m, bonlypars = model_constructor(model_pars)
bounds = m.config.suggested_bounds()
constrained_mu = (
to_inf(constrained_mu, bounds[0]) if pdf_transform else constrained_mu
)
exp_bonly_data = m.expected_data(bonlypars, include_auxdata=True)
def expected_logpdf(pars): # maps pars to bounded space if pdf_transform = True
return (
m.logpdf(to_bounded_vec(pars, bounds), exp_bonly_data)
if pdf_transform
else m.logpdf(pars, exp_bonly_data)
)
def constrained_fit_objective(nuis_par): # NLL
pars = jnp.concatenate([jnp.asarray([constrained_mu]), nuis_par])
return -expected_logpdf(pars)[0]
return constrained_mu, constrained_fit_objective
def constrained_bestfit_minimized(hyper_pars):
mu, cnll = make_model(hyper_pars)
        def bestfit_via_grad_descent(i, param):  # gradient descent
            # `nuis` instead of `np` avoids shadowing the usual numpy alias
            nuis = param[1:]
            g = jax.grad(cnll)(nuis)
            nuis = adam_get_params(adam_update(i, g, adam_init(nuis)))
            param = jnp.concatenate([jnp.asarray([mu]), nuis])
            return param
return bestfit_via_grad_descent
constrained_solver = twophase.two_phase_solver(
param_func=constrained_bestfit_minimized,
default_rtol=default_rtol,
default_atol=default_atol,
default_max_iter=default_max_iter,
)
def constrained_fitter(init, hyper_pars):
solve = constrained_solver(init, hyper_pars)
return solve.value
return constrained_fitter
|
4febeb404746bd233b7a936826380b5d5b9d49b1
| 35,394 |
import numpy
def word2array(ft_names, word):
"""Converts `word` [[(value, feature),...],...] to a NumPy array
Given a word consisting of lists of lists/sets of (value, feature) tuples,
return a NumPy array where each row is a segment and each column is a
feature.
Args:
ft_names (list): list of feature names (as strings) in order; this
argument controls what features are included in the
array that is output and their order vis-a-vis the
columns of the array
word (list): list of lists of feature tuples (output by
FeatureTable.word_fts)
Returns:
ndarray: array in which each row is a segment and each column
is a feature
"""
vdict = {'+': 1, '-': -1, '0': 0}
def seg2col(seg):
seg = dict([(k, v) for (v, k) in seg])
return [vdict[seg[ft]] for ft in ft_names]
return numpy.array([seg2col(s) for s in word], order='F')
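# Usage sketch (illustrative): two segments, two features; note the
# (value, feature) tuple order expected by the function.
word_demo = [[('+', 'syl'), ('-', 'son')], [('0', 'syl'), ('+', 'son')]]
print(word2array(['syl', 'son'], word_demo))  # [[ 1 -1] [ 0  1]]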
|
4305f7b85287f70ffc7cb9ade2c8c2663dc11659
| 35,395 |
from typing import List
from typing import Sequence
from typing import Dict
from typing import Union
import random
def random_motif_search(
sequences: List[Sequence],
pattern_length: int,
laplace: bool = True,
) -> Dict[str, Union[List[Sequence], Sequence, int]]:
"""Finds a motif matrix in a randomized search.
Args:
sequences: Collection of sequences that will be searched for patterns.
pattern_length: Fixed length of patterns.
laplace: If True then pseudocounts are added to the profile matrix
to reduce its sparsity.
Returns:
A motif matrix for the given collection of sequences, as well as
the consensus string and the motif score.
"""
num_seqs = len(sequences)
last_pos = sequences[0].length - pattern_length + 1
motifs = [None] * num_seqs
for i, seq in enumerate(sequences):
start_pos = random.randrange(0, last_pos)
end_pos = start_pos + pattern_length
motifs[i] = Sequence(seq.sequence[start_pos:end_pos])
    # copy, since `motifs` is mutated in place in the loop below
    best_motifs = motifs.copy()
best_motifs_results = find_consensus_motif(
best_motifs,
pattern_length,
laplace=laplace,
)
best_motifs_score = best_motifs_results["Score"]
consensus_motif = best_motifs_results["Consensus"]
while True:
profile_matrix = find_consensus_motif(
motifs,
pattern_length,
laplace=laplace,
)["Matrix"]
for i, seq in enumerate(sequences):
motif_str = seq.most_probable_string(pattern_length, profile_matrix)
motifs[i] = Sequence(motif_str)
motif_results = find_consensus_motif(
motifs,
pattern_length,
laplace=laplace,
)
motifs_score = motif_results["Score"]
if motifs_score < best_motifs_score:
            best_motifs = motifs.copy()
best_motifs_score = motifs_score
consensus_motif = Sequence(motif_results["Consensus"])
else:
results = {
"Motif matrix": best_motifs,
"Score": best_motifs_score,
"Consensus": consensus_motif,
}
return results
|
5b680f5d0015ce50298156a8e7e7755b2591ee6e
| 35,396 |
def get_tweet_sentiment(tweets):
"""
Uses the VADER SentimentIntensityAnalyzer from NLTK to classify tweet sentiment polarity.
Takes in input a list of tweets (text-only, not JSON).
Checks which party a tweet refers to and averages the score for all tweets for each party.
Returns a dictionary of the parties and their average sentiment score (compound).
"""
scores = {"con": [], "lab": [], "lib": [], "snp": []}
averages = {"con": [], "lab": [], "lib": [], "snp": []}
sid = SentimentIntensityAnalyzer()
for tweet in tweets:
ss = sid.polarity_scores(tweet.replace("#", "")) # get the sentiment analysis scores for each tweet
c_score = ss['compound'] # take the compound score, between -1 and 1
if any(word in tweet.lower() for word in CON_WORDS):
scores['con'].append(c_score)
if any(word in tweet.lower() for word in LAB_WORDS):
scores['lab'].append(c_score)
if any(word in tweet.lower() for word in LIB_WORDS):
scores['lib'].append(c_score)
if any(word in tweet.lower() for word in SNP_WORDS):
scores['snp'].append(c_score)
for party, score_list in scores.items():
if len(score_list) != 0:
average = sum(score_list)/len(score_list) # average sentiment per party per tweet
else:
average = 0
averages[party] = average
return averages
|
75edd974b667f1409f9261522c5dd5c338b5ae4b
| 35,397 |
def external(field):
"""
Mark a field as external.
"""
field._external = True
return field
|
83de43305f9655aa2be9c6b7264552bd3e2783f7
| 35,399 |
from typing import Union
from typing import IO
from typing import Any
from typing import Optional
import yaml
def from_file(
file: Union[IO[str], str],
*,
app: Any = None,
base_url: Optional[str] = None,
method: Optional[Filter] = None,
endpoint: Optional[Filter] = None,
tag: Optional[Filter] = None,
operation_id: Optional[Filter] = None,
skip_deprecated_operations: bool = False,
validate_schema: bool = True,
force_schema_version: Optional[str] = None,
data_generation_methods: DataGenerationMethodInput = DEFAULT_DATA_GENERATION_METHODS,
code_sample_style: str = CodeSampleStyle.default().name,
location: Optional[str] = None,
**kwargs: Any, # needed in the runner to have compatible API across all loaders
) -> BaseOpenAPISchema:
"""Load Open API schema from a file descriptor, string or bytes.
:param file: Could be a file descriptor, string or bytes.
"""
try:
raw = yaml.load(file, StringDatesYAMLLoader)
return from_dict(
raw,
app=app,
base_url=base_url,
method=method,
endpoint=endpoint,
tag=tag,
operation_id=operation_id,
skip_deprecated_operations=skip_deprecated_operations,
validate_schema=validate_schema,
force_schema_version=force_schema_version,
data_generation_methods=data_generation_methods,
code_sample_style=code_sample_style,
location=location,
)
except yaml.YAMLError as exc:
raise SchemaLoadingError(YAML_LOADING_ERROR) from exc
|
3dc7ea5805fa82648c98878de02d6d37b6e432c2
| 35,400 |
from random import randint, choice
def rand_score_choice():
    """
    Return a score drawn from a plausible-looking distribution.
    :return: int
    """
score_choice = [randint(50, 80), randint(40, 60), randint(60, 80), randint(60, 80), randint(70, 90), randint(80, 100)]
return choice(score_choice)
|
b34492746048c3ea42ab927b1d4be11bba810419
| 35,401 |
def getExactFreePlaceIndexForCoordinate(freePlaceMap, x, y):
"""
Returns the Exact Value for a given Coordinate on the FreePlaceMap
:param freePlaceMap: The generated FreePlaceMap
:param x: The X Coordinate on the FreePlaceMap
:param y: The Y Coordinate on the FreePlaceMap
    :return: The index value on the FreePlaceMap
"""
if freePlaceMap is None or len(freePlaceMap) <= y or len(freePlaceMap[0]) <= x or x < 0 or y < 0:
return None
if freePlaceMap[y][x] != -1:
return freePlaceMap[y][x] - 1
return None
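# Usage sketch (illustrative): -1 marks occupied cells; positive entries
# are 1-based free-place indices, returned 0-based.
demo_map = [[-1, 1, 1],
            [-1, -1, 2]]
print(getExactFreePlaceIndexForCoordinate(demo_map, 1, 0))  # 0
print(getExactFreePlaceIndexForCoordinate(demo_map, 0, 0))  # None (occupied)
print(getExactFreePlaceIndexForCoordinate(demo_map, 5, 0))  # None (out of range)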
|
4af9dec9163bd505f944f02db55a2dcfa80cb434
| 35,402 |
from typing import Optional
async def get_bc_history(start_time: Optional[str] = None, end_time: Optional[str] = None, isp: Optional[str] = ''):
"""
    ## **param**:
        start_time: start time (optional) str, defaults to one month before now
        end_time: end time (optional) str, defaults to the current time
        isp: carrier (optional) str, defaults to '', e.g. '移动' (China Mobile, used in testing)
## **return**:
[
{
"移动": {
"省份": "天津市",
"运营商": "移动",
"上线上报比例": "46%",
"下线上报比例": "55%",
"访问上报比例": "36%",
"日志符合规范比例": "100%",
"日志正常加载比例": "100%",
"日志正常查询比例": "51%",
"外网代理上报比例": "0%",
"公网IP上报比例": "100%",
"公网IP准确性比例": "28%",
"IMEI上报正确性比例": "99%",
"IMSI上报正确性比例": "100%",
"LAC上报正确性比例": "99%",
"Ci上报正确性比例": "95%",
"结果条数": "1940",
"时间": "2021-07-28"
}
},
...
"""
    if start_time is None:
        start_time = get_before_month()
    if end_time is None:
        end_time = get_now_date()
db = MySqLHelper()
sql = """
SELECT
province,
isp,
online_report_rate,
offline_report_rate,
access_report_rate,
log_standard_rate,
log_loading_rate,
log_query_rate,
extranet_report_rate,
ip_report_rate,
ip_accurate_rate,
imei_correct_rate,
imsi_correct_rate,
lac_correct_rate,
ci_correct_rate,
total_nums,
d_time
FROM
t_ipsy_bc
WHERE
"""
if isp:
sql += """
isp = '{}'
AND d_time BETWEEN '{}'
AND '{}'
""".format(isp, start_time, end_time)
else:
sql += """
d_time BETWEEN '{}'
AND '{}'
""".format(start_time, end_time)
print(sql)
rows = db.selectall(sql=sql)
data_list = [list(row) for row in rows]
temp_data = data_processing(data_list, 2000)
result = {}
for item in temp_data:
temp_dict = {}
temp_dict['province'] = item[0]
temp_dict['isp'] = item[1]
temp_dict['online_report_rate'] = item[2]
temp_dict['offline_report_rate'] = item[3]
temp_dict['access_report_rate'] = item[4]
temp_dict['log_standard_rate'] = item[5]
temp_dict['log_loading_rate'] = item[6]
temp_dict['log_query_rate'] = item[7]
temp_dict['extranet_report_rate'] = item[8]
temp_dict['ip_report_rate'] = item[9]
temp_dict['ip_accurate_rate'] = item[10]
temp_dict['imei_correct_rate'] = item[11]
temp_dict['imsi_correct_rate'] = item[12]
temp_dict['lac_correct_rate'] = item[13]
temp_dict['ci_correct_rate'] = item[14]
temp_dict['total_nums'] = item[15]
temp_dict['d_time'] = item[16]
        result.setdefault(item[1], []).append(temp_dict)
return comm_ret(data = result)
|
11dd27f17f4ba61fcd01332bff6a520a8373c415
| 35,403 |
def get_member_class(resource):
"""
Returns the registered member class for the given resource.
:param resource: registered resource
:type resource: class implementing or instance providing or subclass of
a registered resource interface.
"""
reg = get_current_registry()
if IInterface in provided_by(resource):
member_class = reg.getUtility(resource, name='member-class')
else:
member_class = reg.getAdapter(resource, IMemberResource,
name='member-class')
return member_class
|
50278b01b11760ccd38025389c2558737d73c7a4
| 35,404 |
def split_on_text(row):
    """Splitting the original text into million-character blocks for spaCy"""
    chunk_size = 1000000
    # ceiling division: number of chunks needed to cover the whole text
    n_chunks = -(-row['original_text_length'] // chunk_size)
    final_texts = []
    for i in range(n_chunks):
        final_texts.append(row.text[i * chunk_size:(i + 1) * chunk_size])
    return final_texts
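# Usage sketch (illustrative), assuming the row is a pandas Series carrying
# 'text' and 'original_text_length' fields:
import pandas as pd
row_demo = pd.Series({'text': 'x' * 2500000, 'original_text_length': 2500000})
print([len(t) for t in split_on_text(row_demo)])  # [1000000, 1000000, 500000]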
|
678377650df3ca49cfb0d4404382589e32e3c6ae
| 35,405 |
def make_child_node(parent_node, action, state):
""" Construct an child search node """
return SearchNode(state, parent_node, action)
|
4f7f0d91cbf7384c81c801adb1273e3c9c7c2916
| 35,407 |
def get_number(number):
"""
Repeats back a number to you
---
operationId: getPetsById
parameters:
- name: number
in: path
type: string
description: the number
responses:
200:
description: Hello number!
"""
return "Hello {}!".format(number)
|
22d6c8a7a5b3a8ff946e4dccaf5876134a0293cd
| 35,408 |
def get_user_by_id(uid, session=None):
    """Get user by id."""
    with session_scope() as session:
        user = session.query(User)\
            .filter(User.id == uid)\
            .first()
        return user.to_json() if user is not None else None
|
6273d51340b837bf12afa6eb3c77a03af88eacba
| 35,409 |
import cv2
import numpy as np
def make_grayscale(img: np.ndarray) -> np.ndarray:
"""Turns BGR image into grayscale."""
if len(img.shape) == 3 and img.shape[2] == 3:
return cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
else:
return img
|
7fd184948d671ce501dc3a6e9396de0a5601d9a7
| 35,410 |
def GetKM3NeTOMGelAbsorptionLength():
"""
A function to return the absorption length
the gel of an KM3NeT OM
Note: The file hit-ini_optic.f has three different
datasets for this absorption length!
However in the file hit.f it always is initialized with the
same (gel_id=1). Thus this one is implemented here.
"""
# Data copied from the km3 file hit-ini_optic.f
# GEL WACKER (default)
al_gel_default_reverse = [100.81, # at 610 nm
99.94, # at 600 nm
99.89,
96.90,
96.42,
94.36,
89.09,
90.10,
86.95,
85.88,
84.49,
81.08,
78.18,
76.48,
74.55,
72.31,
68.05,
66.91,
64.48,
62.53,
59.38,
56.64,
53.29,
48.96,
45.71,
41.88,
37.14,
30.49,
23.08,
15.60,
8.00,
0.00 # at 300 nm
]
# Apply units
al_gel_default_reverse = [ (i * I3Units.cm) for i in al_gel_default_reverse]
al_gel_default_reverse.reverse() # reverse the list (in-place)
return I3CLSimFunctionFromTable(300.*I3Units.nanometer, 10.*I3Units.nanometer, al_gel_default_reverse)
|
2fc250b636a3d20baac3a3a28de4604132fe2ab1
| 35,411 |
def align_frontiers_on_bars(frontiers, bars):
"""
Aligns the frontiers of segments to the closest bars (in time).
The idea is that frontiers generally occurs on downbeats,
and that realigning the estimation could improve perfomance for low tolerances scores.
Generally used for comparison with techniques which don't align their segmentation on bars.
Parameters
----------
frontiers : list of float
Time of the estimated frontiers.
bars : list of tuple of float
The bars of the signal.
Returns
-------
frontiers_on_bars : list of floats
Frontiers, realigned on bars.
"""
frontiers_on_bars = []
i = 1
for frontier in frontiers:
while i < len(bars) - 1 and bars[i][1] < frontier:
i+=1
if i == len(bars) - 1:
frontiers_on_bars.append(frontier)
else:
if bars[i][1] - frontier < frontier - bars[i][0]:
frontiers_on_bars.append(bars[i][1])
else:
frontiers_on_bars.append(bars[i][0])
return frontiers_on_bars
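# Usage sketch (illustrative): a frontier at 1.1 s snaps to the nearest
# bar boundary (1.0 s here).
bars_demo = [(0.0, 0.5), (0.5, 1.0), (1.0, 1.5), (1.5, 2.0)]
print(align_frontiers_on_bars([1.1], bars_demo))  # [1.0]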
|
ef1f3d62a36065f64d31c4e4d7f6ce07045e2e5e
| 35,412 |
def sudo_command(cmd, user=None, password=None, extraopts=None):
"""Run a command with sudo and return the output, a tuple of (stdout,
stderr).
"""
proc = sudo(cmd, user=user, password=password, extraopts=extraopts)
return process.run_process(proc)
|
605372faca312b95f3c62244dde371e7550b72cc
| 35,413 |
def FK42FK5Matrix(t=None):
"""
----------------------------------------------------------------------
Purpose: Create a matrix to precess from B1950 in FK4 to J2000 in FK5
following to Murray's (1989) procedure.
Input: t, a Besselian epoch as epoch of observation.
Returns: Transformation matrix M as in XYZfk5 = M * XYZfk4
Reference: Murray, C.A. The Transformation of coordinates between the
systems B1950.0 and J2000.0, and the principal galactic axis
referred to J2000.0,
Astronomy and Astrophysics (ISSN 0004-6361), vol. 218, no. 1-2,
July 1989, p. 325-329.
Poppe P.C.R.,, Martin, V.A.F., Sobre as Bases de Referencia Celeste
SitientibusSerie Ciencias Fisicas
Notes: Murray precesses from B1950 to J2000 using a precession matrix
by Lieske. Then applies the equinox correction and ends up with a
transformation matrix X(0) as given in this function.
In Murray's article it is proven that using the procedure as
described in the article, r_fk5 = X(0).r_fk4 for extra galactic
sources where we assumed that the proper motion in FK5 is zero.
This procedure is independent of the epoch of observation.
Note that the matrix is not a rotation matrix.
    FK4 is not an inertial coordinate frame (because of the error
    in precession and the motion of the equinox). This has
consequences for the proper motions. e.g. a source with zero
    proper motion in FK5 has a fictitious proper motion in FK4.
This affects the actual positions in a way that the correction
is bigger if the epoch of observation is further away from 1950.0
The focus of this library is on data of which we do not have
information about the proper motions. So for positions of which
we allow non zero proper motion in FK5 one needs to supply the
epoch of observation
----------------------------------------------------------------------
"""
r11 = 0.9999256794956877; r12 = -0.0111814832204662; r13 = -0.0048590038153592
r21 = 0.0111814832391717; r22 = 0.9999374848933135; r23 = -0.0000271625947142
r31 = 0.0048590037723143; r32 = -0.0000271702937440; r33 = 0.9999881946023742
    if t is not None: # i.e. we are also assuming that v != 0 in FK5 !!
jd = epochBessel2JD(t)
T = (jd-2433282.423)/36525.0 # t-1950 in Julian centuries = F^-1.t1 from Murray (1989)
r11 += -0.0026455262*T/1000000.0
r12 += -1.1539918689*T/1000000.0
r13 += 2.1111346190*T/1000000.0
r21 += 1.1540628161*T/1000000.0
r22 += -0.0129042997*T/1000000.0
r23 += 0.0236021478*T/1000000.0
r31 += -2.1112979048*T/1000000.0
r32 += -0.0056024448*T/1000000.0
r33 += 0.0102587734*T/1000000.0
return n.matrix( ([r11,r12,r13],[r21,r22,r23],[r31,r32,r33]) )
|
711ae31af8d6b0e55d940a3dd75cdc724c46e80c
| 35,414 |
def _rec_superdense(packet):
"""
Receives a superdense qubit and decodes it.
Args:
packet (Packet): The packet in which to receive.
Returns:
dict: A dictionary consisting of decoded superdense message and sequence number
"""
receiver = packet.receiver
sender = packet.sender
payload = packet.payload
host_receiver = network.get_host(receiver)
q1 = host_receiver.get_data_qubit(sender, payload.id, wait=WAIT_TIME)
q2 = host_receiver.get_epr(sender, payload.id, wait=WAIT_TIME)
assert q1 is not None and q2 is not None
if packet.await_ack:
_send_ack(packet.sender, packet.receiver, packet.seq_num)
return {'sender': packet.sender, 'message': _decode_superdense(q1, q2),
SEQUENCE_NUMBER: packet.seq_num}
|
f8f20554170b0549d71d0ed666dbea987148791f
| 35,415 |
def _high_bit(value):
"""returns index of highest bit, or -1 if value is zero or negative"""
return value.bit_length() - 1
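# Usage sketch (illustrative):
print(_high_bit(8))  # 3
print(_high_bit(1))  # 0
print(_high_bit(0))  # -1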
|
1bd783593ae7d5b15cc56c8a8db5c86798fd8c9f
| 35,416 |
import re
from bs4 import BeautifulSoup
def parse_round(bsoup, rnd, gid, airdate):
"""Parses and inserts the list of clues from a whole round."""
round_id = "jeopardy_round" if rnd == 1 else "double_jeopardy_round"
r = bsoup.find(id=round_id)
# The game may not have all the rounds
if not r:
return False
# The list of categories for this round
categories = [c.get_text() for c in r.find_all("td", class_="category_name")]
# The x_coord determines which category a clue is in
# because the categories come before the clues, we will
# have to match them up with the clues later on.
x = 0
for a in r.find_all("td", class_="clue"):
        is_missing = not a.get_text().strip()
if not is_missing:
value = a.find("td", class_=re.compile("clue_value")).get_text().lstrip("D: $")
text = a.find("td", class_="clue_text").get_text()
answer = BeautifulSoup(a.find("div", onmouseover=True).get("onmouseover"), "lxml")
answer = answer.find("em", class_="correct_response").get_text()
insert([gid, airdate, rnd, categories[x], value, text, answer])
x = 0 if x == 5 else x + 1
return True
|
19cc79523de901ae773f5c808795c8c160291310
| 35,417 |
def on_end_validation(func):
""" The :func:`on_end_validation` decorator is used to initialise a :class:`.Callback` with :meth:`~.Callback.on_end_validation`
calling the decorated function
Example: ::
>>> import torchbearer
>>> from torchbearer import Trial
>>> from torchbearer.callbacks import on_end_validation
# Example callback running at the end of each validation pass.
>>> @on_end_validation
... def print_callback(state):
... print('Finished validating.')
>>> trial = Trial(None, callbacks=[print_callback]).for_steps(1).for_val_steps(1).run()
Finished validating.
Args:
func (function): The function(state) to *decorate*
Returns:
Callback: Initialised callback with :meth:`~.Callback.on_end_validation` calling func
"""
return bind_to(Callback.on_end_validation)(func)
|
d58ac5f6cb4e2da08480e5ac9a18ec425b5f4946
| 35,418 |
from typing import Any
from typing import List
def transpose_checker(
attrs: Any, args: List[relay.expr.Expr], op_name: str
) -> bool: # pylint: disable=unused-variable
"""Check if transpose is supported by TensorRT."""
if get_tensorrt_use_implicit_batch_mode() and int(attrs.axes[0]) != 0:
logger.info(f"{op_name}: can't modify batch dimension.")
return False
return True
|
fb5a3d0cbade224d269a1ca5bc54d86cff9ca6a1
| 35,419 |
from pathlib import Path
def project_root() -> Path:
"""Returns project root folder."""
return Path(__file__).parent
|
7296e26ab57a3adcbde65df7ed5f1976ff3b84ca
| 35,420 |
def qual(obj):
"""
Return fully qualified name of a class.
"""
return u'{}.{}'.format(obj.__class__.__module__, obj.__class__.__name__)
|
5b9779935b84a8bb3653cc9fc2c627dda5dd0e7f
| 35,421 |
def treefactorial(high: int, low: int=None) -> int:
"""Pure Python factorial, no imports by Daniel Fischer @stackoverflow.com
Daniel Fischer says algorithm is old.
"""
if high < 2:
return 1
if low is None:
return treefactorial(high, 1)
if low + 1 < high:
        mid: int = (low + high) // 2
        return treefactorial(mid, low) * treefactorial(high, mid + 1)
if low == high:
return low
return low*high
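# Usage sketch (illustrative), checked against the standard library:
import math
assert treefactorial(10) == math.factorial(10) == 3628800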
|
eda784b853ca7305d4206a54b911c1b10e645587
| 35,422 |
import math
def squarish_factors(x):
"""Returns the closest pair of factors of x.
Parameters
----------
x : int
Examples
--------
>>> squarish_factors(20)
(5, 4)
>>> squarish_factors(36)
(6, 6)
>>> squarish_factors(53)
(53, 1)
>>> squarish_factors(0)
(0, 0)
"""
if is_square(x):
sr = int(math.sqrt(x))
return sr, sr
mid = math.floor(math.sqrt(x))
for d in range(mid,0,-1):
if x%d == 0:
            return x // d, d
# if the above loop completes, there's a problem
raise ValueError("X must be a positive integer.")
|
a5d7cb9983d86d755622fd4bc9e495d7271d7719
| 35,425 |
import pandas as pd
def get_evidence(row: pd.Series) -> Evidence:
"""Return evidence for a Statement.
Parameters
----------
row :
Currently investigated row of the dataframe.
Returns
-------
:
Evidence object with the source_api, the PMID and the original
sentence.
"""
pmid = str(row['id']) if row['id'] else None
evidence = Evidence(source_api='gnbr',
pmid=pmid,
text=row['sentence'],
text_refs={'PMID': pmid})
return evidence
|
bc54b6850957a7ab4a1e37289f17f04fc24ee25d
| 35,426 |
def config_bgp(dut, **kwargs):
"""
config_bgp(dut = DUT1, router_id = '9.9.9.9', local_as='100', neighbor ='192.168.3.2', remote_as='200', config = 'yes', config_type_list =["neighbor"])
config_bgp(dut = DUT1, local_as='100', remote_as='200', neighbor ='2001::2', config = 'yes', config_type_list =["neighbor"]
config_bgp(dut = DUT1, local_as='100',config = 'yes',config_type_list =["redist"], redistribute ='connected')
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2',config = 'yes',config_type_list =["bfd"])
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2',config = 'yes',config_type_list =["bfd","redist"], redistribute ='connected')
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2', config = 'yes', password ='broadcom' ,config_type_list =["pswd"])
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2',config = 'no', password ='broadcom' ,config_type_list =["pswd"])
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2', config = 'yes', update_src ='2.2.2.1', config_type_list =["update_src"])
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2', config = 'no', update_src ='2.2.2.1', config_type_list =["update_src"])
config_bgp(dut = DUT1, local_as='100',config = 'yes',config_type_list =["max_path_ibgp"], max_path_ibgp ='8')
config_bgp(dut = DUT1, local_as='100',config = 'no',config_type_list =["max_path_ibgp"], max_path_ibgp ='8')
config_bgp(dut = DUT1, local_as='100',config = 'yes',addr_family ='ipv6', config_type_list =["max_path_ibgp"], max_path_ibgp ='8')
config_bgp(dut = DUT1, local_as='100',config = 'no',addr_family ='ipv6', config_type_list =["max_path_ibgp"], max_path_ibgp ='8')
config_bgp(dut = DUT1, local_as='100',config = 'yes',addr_family ='ipv6', config_type_list =["max_path_ebgp"], max_path_ebgp ='20')
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2',config ='yes', config_type_list =["routeMap"], routeMap ='map123', diRection='out')
config_bgp(dut = DUT1, local_as='100', neighbor ='192.168.3.2',config ='no', config_type_list =["routeMap"], routeMap ='map123', diRection='out')
config_bgp(dut = DUT1, local_as='100', neighbor ='2001::20', addr_family ='ipv6',config = 'yes', config_type_list =["routeMap"], routeMap ='map123', diRection='out')
config_bgp(dut = DUT1, local_as='100',config = 'no', removeBGP='yes', config_type_list =["removeBGP"])
config_bgp(dut = dut1,local_as = '100', neighbor = '20.20.20.2', config = 'yes', config_type_list =["nexthop_self"])
config_bgp(dut = dut1,local_as = '100', neighbor = '20.20.20.2', config = 'yes', config_type_list =["ebgp_mhop"],ebgp_mhop ='2')
"""
cli_type = get_cfg_cli_type(dut, **kwargs)
st.log('Configure BGP')
config = kwargs.get('config', "yes")
vrf_name = kwargs.get('vrf_name', "default")
router_id = kwargs.get('router_id','')
config_type_list = kwargs.get('config_type_list', None)
neighbor = kwargs.get('neighbor', None)
local_as = kwargs.get('local_as', None)
remote_as = kwargs.get('remote_as', None)
peergroup = kwargs.get('peergroup', '')
#pswd = kwargs.get('pswd', None)
#activate = kwargs.get('activate', None)
#nexthop_self = kwargs.get('nexthop_self', None)
addr_family = kwargs.get('addr_family', 'ipv4')
keepalive = kwargs.get('keepalive', '')
holdtime = kwargs.get('holdtime', '')
conf_peers = kwargs.get('conf_peers', '')
conf_identf = kwargs.get('conf_identf', '')
update_src = kwargs.get('update_src', None)
update_src_intf = kwargs.get("update_src_intf", "") if "update_src_intf" in config_type_list else ""
interface = kwargs.get('interface', None)
connect = kwargs.get('connect', None)
ebgp_mhop = kwargs.get('ebgp_mhop', None)
#failover = kwargs.get('failover', None)
shutdown = kwargs.get('shutdown', None)
#max_path = kwargs.get('max_path', None)
redistribute = kwargs.get('redistribute', None)
network = kwargs.get('network', None)
password = kwargs.get('password', None)
max_path_ibgp = kwargs.get('max_path_ibgp', None)
max_path_ebgp = kwargs.get('max_path_ebgp', None)
routeMap = kwargs.get('routeMap', None)
distribute_list = kwargs.get('distribute_list', None)
filter_list = kwargs.get('filter_list', None)
prefix_list = kwargs.get('prefix_list', None)
#import_vrf = kwargs.get('import_vrf', None)
import_vrf_name = kwargs.get('import_vrf_name', None)
#fast_external_failover = kwargs.get('fast_external_failover', None)
bgp_bestpath_selection = kwargs.get('bgp_bestpath_selection', None)
removeBGP = kwargs.get('removeBGP', 'no')
diRection = kwargs.get('diRection', 'in')
weight = kwargs.get('weight', None)
config_cmd = "" if config.lower() == 'yes' else "no"
my_cmd =''
if cli_type == "vtysh":
if 'local_as' in kwargs and removeBGP != 'yes':
if vrf_name != 'default':
my_cmd = 'router bgp {} vrf {}\n'.format(local_as, vrf_name)
else:
my_cmd = 'router bgp {}\n'.format(local_as)
if router_id != '':
my_cmd += '{} bgp router-id {}\n'.format(config_cmd, router_id)
if keepalive != '' and holdtime != '':
my_cmd += '{} timers bgp {} {}\n'.format(config_cmd, keepalive, holdtime)
if config_cmd == '':
if peergroup != '':
my_cmd += 'neighbor {} peer-group\n'.format(peergroup)
if conf_peers != '':
my_cmd += '{} bgp confederation peers {}\n'.format(config_cmd, conf_peers)
if conf_identf != '':
my_cmd += '{} bgp confederation identifier {}\n'.format(config_cmd, conf_identf)
for type1 in config_type_list:
if type1 == 'neighbor':
my_cmd += '{} neighbor {} remote-as {}\n'.format(config_cmd, neighbor, remote_as)
elif type1 == 'shutdown':
my_cmd += '{} neighbor {} shutdown\n'.format(config_cmd, neighbor)
elif type1 == 'failover':
my_cmd += '{} bgp fast-external-failover\n'.format(config_cmd)
elif type1 == 'router_id':
st.log("Configuring the router-id on the device")
elif type1 == 'fast_external_failover':
st.log("Configuring the fast_external_failover")
my_cmd += '{} bgp fast-external-failover\n'.format(config_cmd)
elif type1 == 'bgp_bestpath_selection':
st.log("Configuring bgp default bestpath selection")
my_cmd += '{} bgp bestpath {}\n'.format(config_cmd,bgp_bestpath_selection)
elif type1 == 'activate':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} activate\n'.format(config_cmd, neighbor)
elif type1 == 'nexthop_self':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} next-hop-self\n'.format(config_cmd, neighbor)
elif type1 == 'pswd':
my_cmd += '{} neighbor {} password {}\n'.format(config_cmd, neighbor, password)
elif type1 == 'update_src' or type1 == 'update_src_intf':
                if update_src is not None:
                    my_cmd += '{} neighbor {} update-source {}\n'.format(config_cmd, neighbor, update_src)
                elif update_src_intf is not None:
                    my_cmd += '{} neighbor {} update-source {}\n'.format(config_cmd, neighbor, update_src_intf)
elif type1 == 'interface':
my_cmd += '{} neighbor {} interface {}\n'.format(config_cmd, neighbor, interface)
elif type1 == 'connect':
my_cmd += '{} neighbor {} timers connect {}\n'.format(config_cmd, neighbor, connect)
elif type1 == 'ebgp_mhop':
my_cmd += '{} neighbor {} ebgp-multihop {}\n'.format(config_cmd, neighbor, ebgp_mhop)
elif type1 == 'peergroup':
my_cmd += '{} neighbor {} remote-as {}\n'.format(config_cmd, peergroup, remote_as)
if config_cmd == '':
if interface:
my_cmd += 'neighbor {} interface peer-group {}\n'.format(neighbor, peergroup)
else:
my_cmd += 'neighbor {} peer-group {}\n'.format(neighbor, peergroup)
if config_cmd == 'no':
my_cmd += '{} neighbor {} peer-group\n'.format(config_cmd, peergroup)
elif type1 == 'bfd':
if peergroup:
my_cmd += '{} neighbor {} bfd\n'.format(config_cmd, peergroup)
                elif interface != '' and interface is not None:
my_cmd += '{} neighbor {} bfd\n'.format(config_cmd, interface)
else:
my_cmd += '{} neighbor {} bfd\n'.format(config_cmd, neighbor)
elif type1 == 'max_path_ibgp':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} maximum-paths ibgp {}\n'.format(config_cmd, max_path_ibgp)
my_cmd += 'exit\n'
elif type1 == 'max_path_ebgp':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} maximum-paths {}\n'.format(config_cmd, max_path_ebgp)
my_cmd += 'exit\n'
elif type1 == 'redist':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} redistribute {}\n'.format(config_cmd, redistribute)
my_cmd += 'exit\n'
elif type1 == 'network':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} network {}\n'.format(config_cmd, network)
my_cmd += 'exit\n'
elif type1 == 'import-check':
my_cmd += '{} bgp network import-check\n'.format(config_cmd)
elif type1 == 'import_vrf':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} import vrf {} \n'.format(config_cmd, import_vrf_name)
my_cmd += 'exit\n'
elif type1 == 'routeMap':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} route-map {} {}\n'.format(config_cmd, neighbor, routeMap, diRection)
my_cmd += 'exit\n'
elif type1 == 'distribute_list':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} distribute-list {} {}\n'.format(config_cmd, neighbor, distribute_list, diRection)
my_cmd += 'exit\n'
elif type1 == 'filter_list':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} filter-list {} {}\n'.format(config_cmd, neighbor, filter_list, diRection)
my_cmd += 'exit\n'
elif type1 == 'prefix_list':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} prefix-list {} {}\n'.format(config_cmd, neighbor, prefix_list, diRection)
my_cmd += 'exit\n'
elif type1 == 'default_originate':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
if 'routeMap' in kwargs:
my_cmd += '{} neighbor {} default-originate route-map {}\n'.format(config_cmd, neighbor, routeMap)
else:
my_cmd += '{} neighbor {} default-originate\n'.format(config_cmd, neighbor)
my_cmd += 'exit\n'
elif type1 == 'removePrivateAs':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} remove-private-AS\n'.format(config_cmd, neighbor)
my_cmd += 'exit\n'
elif type1 == 'multipath-relax':
my_cmd += '{} bgp bestpath as-path multipath-relax \n'.format(config_cmd)
elif type1 == 'remote-as':
my_cmd += '{} neighbor {} interface remote-as {}\n'.format(config_cmd,interface,remote_as)
elif type1 == 'weight':
my_cmd += 'address-family {} unicast\n'.format(addr_family)
my_cmd += '{} neighbor {} weight {}\n'.format(config_cmd, neighbor, weight)
elif type1 == 'removeBGP':
st.log("Removing the bgp config from the device")
else:
st.log('Invalid BGP config parameter: {}'.format(type1))
output = st.config(dut, my_cmd, type=cli_type)
if "% Configure the peer-group first" in output:
st.error(output)
return False
if "% Specify remote-as or peer-group commands first" in output:
st.error(output)
return False
if vrf_name != 'default' and removeBGP == 'yes':
my_cmd = '{} router bgp {} vrf {}'.format(config_cmd, local_as, vrf_name)
st.config(dut, my_cmd, type=cli_type)
elif vrf_name == 'default' and removeBGP == 'yes':
if 'local_as' in kwargs:
my_cmd = '{} router bgp {}'.format(config_cmd,local_as)
else:
my_cmd = '{} router bgp'.format(config_cmd)
st.config(dut, my_cmd, type=cli_type)
elif cli_type == "klish":
commands = list()
neigh_name = get_interface_number_from_name(neighbor)
if interface:
intf_name = get_interface_number_from_name(interface)
shutdown = kwargs.get("shutdown", None) if "shutdown" in config_type_list else None
activate = kwargs.get("activate", None) if "activate" in config_type_list else None
nexthop_self = kwargs.get("nexthop_self", True) if "nexthop_self" in config_type_list else None
pswd = True if "pswd" in config_type_list else False
update_src = kwargs.get("update_src", "") if "update_src" in config_type_list else ""
update_src_intf = get_interface_number_from_name(update_src_intf)
bfd = True if "bfd" in config_type_list else False
route_map = True if "routeMap" in config_type_list else False
default_originate = True if "default_originate" in config_type_list else False
removePrivateAs = True if "removePrivateAs" in config_type_list else False
no_neighbor = "no" if kwargs.get("config") == "no" else ""
sub_list = ["neighbor", "routeMap", "shutdown", "activate", "nexthop_self", "pswd", "update_src",
"bfd", "default_originate", "removePrivateAs", "no_neigh","remote-as","filter_list",
"prefix_list", "distribute_list", "weight", "keepalive", "holdtime", "ebgp_mhop","peergroup","update_src_intf","connect"]
if 'local_as' in kwargs and removeBGP != 'yes':
if vrf_name != 'default':
my_cmd = 'router bgp {} vrf {}'.format(local_as, vrf_name)
else:
my_cmd = 'router bgp {}'.format(local_as)
commands.append(my_cmd)
if router_id:
my_cmd = '{} router-id {}'.format(config_cmd, router_id)
commands.append(my_cmd)
if peergroup:
my_cmd = '{} peer-group {}'.format(config_cmd, peergroup)
commands.append(my_cmd)
commands.append("exit")
# if conf_peers:
# my_cmd += '{} bgp confederation peers {}\n'.format(config_cmd, conf_peers)
# if conf_identf != '':
# my_cmd += '{} bgp confederation identifier {}\n'.format(config_cmd, conf_identf)
config_default_activate = True
config_remote_as = True
for type1 in config_type_list:
if type1 in sub_list:
if neigh_name and not peergroup:
if isinstance(neigh_name, dict):
my_cmd = "neighbor interface {} {}".format(neigh_name["type"],neigh_name["number"])
else:
my_cmd = "neighbor {}".format(neigh_name)
commands.append(my_cmd)
if peergroup:
my_cmd_peer = '{} peer-group {}'.format(config_cmd, peergroup)
if 'peergroup' in config_type_list:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(no_neighbor, neigh_name["type"],
neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(no_neighbor, neigh_name)
if neigh_name:
commands.append(my_cmd)
commands.append(my_cmd_peer)
commands.append('exit')
neigh_name = None
activate = True
commands.append(my_cmd_peer)
if config_remote_as and remote_as:
if interface and not peergroup:
my_cmd = "neighbor interface {} {}".format(intf_name['type'], intf_name['number'])
commands.append(my_cmd)
my_cmd = '{} remote-as {}'.format(config_cmd, remote_as)
commands.append(my_cmd)
config_remote_as = False
if config_default_activate and (activate or neigh_name):
# show ip bgp summary will list
# v4 neighbor only if activate is done for v4 address family
# v6 neighbor only if activate is done for v4 address family
# both v4 and v6 neighbor only if activate is done for both address families
# There is a defect for this issue - 20468
if config_cmd == "":
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} activate'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
if addr_family == "ipv6":
my_cmd = 'address-family ipv4 unicast'
commands.append(my_cmd)
my_cmd = '{} activate'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
config_default_activate = False
# Avoid disable of neighbor unless config=no and config_type_list contains activate
elif activate and config_cmd == "no":
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} activate'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
activate = None
if shutdown:
my_cmd = '{} shutdown'.format(config_cmd)
commands.append(my_cmd)
shutdown = None
elif route_map:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} route-map {} {}'.format(config_cmd, routeMap, diRection)
commands.append(my_cmd)
commands.append("exit")
route_map = False
elif filter_list:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} filter-list {} {}'.format(config_cmd, filter_list, diRection)
commands.append(my_cmd)
commands.append("exit")
filter_list = None
elif prefix_list:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} prefix-list {} {}\n'.format(config_cmd, prefix_list, diRection)
commands.append(my_cmd)
commands.append("exit")
prefix_list = None
elif distribute_list:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} prefix-list {} {}\n'.format(config_cmd, distribute_list, diRection)
commands.append(my_cmd)
commands.append("exit")
distribute_list = None
elif default_originate:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
if 'routeMap' in kwargs:
my_cmd = '{} default-originate route-map {}'.format(config_cmd, routeMap)
else:
my_cmd = '{} default-originate'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
default_originate = False
elif removePrivateAs:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} remove-private-AS'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
removePrivateAs = False
elif weight:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} weight {}'.format(config_cmd, weight)
commands.append(my_cmd)
commands.append("exit")
weight = None
elif keepalive and holdtime:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(no_neighbor, neigh_name["type"], neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(no_neighbor, neigh_name)
my_cmd = '{} timers {} {}'.format(config_cmd, keepalive, holdtime)
commands.append(my_cmd)
keepalive = 0
holdtime = 0
elif nexthop_self:
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} next-hop-self'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
nexthop_self = None
elif pswd:
password = "" if config_cmd== 'no' else password
my_cmd = '{} password {}'.format(config_cmd, password)
commands.append(my_cmd)
pswd = False
elif update_src:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(no_neighbor, neigh_name["type"], neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(no_neighbor, neigh_name)
my_cmd = '{} update-source {}'.format(config_cmd, update_src)
commands.append(my_cmd)
update_src = None
elif update_src_intf:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(no_neighbor, neigh_name["type"], neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(no_neighbor, neigh_name)
if isinstance(update_src_intf, dict):
my_cmd = '{} update-source interface {} {}'.format(config_cmd, update_src_intf['type'],update_src_intf['number'])
commands.append(my_cmd)
update_src_intf = None
elif ebgp_mhop:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(no_neighbor, neigh_name["type"], neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(no_neighbor, neigh_name)
my_cmd = '{} ebgp-multihop {}'.format(config_cmd, ebgp_mhop)
commands.append(my_cmd)
ebgp_mhop = None
elif bfd:
if interface and remote_as:
my_cmd = "neighbor interface {}".format(interface)
commands.append(my_cmd)
elif neighbor and not interface and remote_as:
my_cmd = "neighbor {}".format(neighbor)
commands.append(my_cmd)
my_cmd = "remote-as {}".format(remote_as)
commands.append(my_cmd)
my_cmd = '{} bfd'.format(config_cmd)
commands.append(my_cmd)
bfd = False
elif connect:
my_cmd = '{} timers connect {}'.format(config_cmd, connect)
commands.append(my_cmd)
connect = None
st.log('config_bgp command_list: {}'.format(commands))
#come back to router bgp context
commands.append("exit")
# elif type1 == 'failover':
# my_cmd += '{} bgp fast-external-failover\n'.format(config_cmd)
# elif type1 == 'router_id':
# st.log("Configuring the router-id on the device")
elif type1 == 'fast_external_failover':
st.log("Configuring the fast_external_failover")
my_cmd = '{} fast-external-failover'.format(config_cmd)
commands.append(my_cmd)
elif type1 == 'bgp_bestpath_selection':
st.log("Configuring bgp default bestpath selection")
my_cmd = '{} bestpath {}'.format(config_cmd, bgp_bestpath_selection)
commands.append(my_cmd)
# elif type1 == 'interface':
# my_cmd += '{} neighbor {} interface {}\n'.format(config_cmd, neighbor, interface)
# elif type1 == 'connect':
# my_cmd += '{} neighbor {} timers connect {}\n'.format(config_cmd, neighbor, connect)
elif type1 == 'max_path_ibgp':
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} maximum-paths ibgp {}'.format(config_cmd, max_path_ibgp)
commands.append(my_cmd)
commands.append("exit")
elif type1 == 'max_path_ebgp':
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
if config_cmd == '' or config_cmd == 'yes':
my_cmd = '{} maximum-paths {}'.format(config_cmd, max_path_ebgp)
else:
my_cmd = '{} maximum-paths'.format(config_cmd)
commands.append(my_cmd)
commands.append("exit")
elif type1 == 'redist':
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} redistribute {}'.format(config_cmd, redistribute)
commands.append(my_cmd)
commands.append("exit")
elif type1 == 'network':
my_cmd = 'address-family {} unicast'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} network {}'.format(config_cmd, network)
commands.append(my_cmd)
commands.append("exit")
elif type1 == 'import-check':
my_cmd = '{} network import-check'.format(config_cmd)
commands.append(my_cmd)
elif type1 == 'import_vrf':
my_cmd = 'address-family {} unicast\n'.format(addr_family)
commands.append(my_cmd)
my_cmd = '{} import vrf {}'.format(config_cmd, import_vrf_name)
commands.append(my_cmd)
commands.append("exit")
elif type1 == 'multipath-relax':
my_cmd = '{} bestpath as-path multipath-relax'.format(config_cmd)
commands.append(my_cmd)
elif type1 == 'removeBGP':
st.log("Removing the bgp config from the device")
elif type1 == 'router_id':
st.log("Configuring the router-id on the device")
elif type1 == 'peer_group':
st.log("Configuring the peer_group on the device")
else:
st.log('Invalid BGP config parameter')
if config_cmd == 'no' and 'neighbor' in config_type_list and neigh_name and not peergroup:
if isinstance(neigh_name, dict):
my_cmd = "{} neighbor interface {} {}".format(config_cmd, neigh_name["type"],neigh_name["number"])
else:
my_cmd = "{} neighbor {}".format(config_cmd, neigh_name)
commands.append(my_cmd)
# commands.append("exit")
#go back to config terminal prompt
if removeBGP != 'yes':
commands.append('exit\n')
if commands:
cli_output = st.config(dut, commands, type=cli_type, skip_error_check=True)
fail_on_error(cli_output)
if vrf_name != 'default' and removeBGP == 'yes':
my_cmd = '{} router bgp vrf {}'.format(config_cmd, vrf_name)
cli_output = st.config(dut, my_cmd, type=cli_type, skip_error_check=True)
fail_on_error(cli_output)
elif vrf_name == 'default' and removeBGP == 'yes':
my_cmd = '{} router bgp'.format(config_cmd)
cli_output = st.config(dut, my_cmd, type=cli_type, skip_error_check=True)
fail_on_error(cli_output)
elif cli_type in ["rest-patch", "rest-put"]:
shutdown = kwargs.get("shutdown", None) if "shutdown" in config_type_list else None
activate = kwargs.get("activate", None) if "activate" in config_type_list else None
nexthop_self = kwargs.get("nexthop_self", True) if "nexthop_self" in config_type_list else None
pswd = True if "pswd" in config_type_list else False
update_src = kwargs.get("update_src", "") if "update_src" in config_type_list else ""
update_src_intf = get_interface_number_from_name(update_src_intf)
bfd = True if "bfd" in config_type_list else False
route_map = True if "routeMap" in config_type_list else False
default_originate = True if "default_originate" in config_type_list else False
removePrivateAs = True if "removePrivateAs" in config_type_list else False
#no_neighbor = "no" if kwargs.get("config") == "no" else ""
sub_list = ["neighbor", "routeMap", "shutdown", "activate", "nexthop_self", "pswd", "update_src",
"bfd", "default_originate", "removePrivateAs", "no_neigh", "remote-as", "filter_list",
"prefix_list", "distribute_list", "weight", "keepalive", "holdtime", "ebgp_mhop", "peergroup",
"update_src_intf", "connect"]
bgp_data = dict()
bgp_data["openconfig-network-instance:bgp"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["config"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"] = list()
bgp_data["openconfig-network-instance:bgp"]["global"]["route-selection-options"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["confederation"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["dynamic-neighbor-prefixes"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["dynamic-neighbor-prefixes"][
"dynamic-neighbor-prefixe"] = list()
bgp_data["openconfig-network-instance:bgp"]["global"]["openconfig-bgp-ext:bgp-ext-route-reflector"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["openconfig-bgp-ext:global-defaults"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["openconfig-bgp-ext:update-delay"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["openconfig-bgp-ext:max-med"] = dict()
bgp_data["openconfig-network-instance:bgp"]["neighbors"] = dict()
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"] = list()
bgp_data["openconfig-network-instance:bgp"]["peer-groups"] = dict()
bgp_data["openconfig-network-instance:bgp"]["peer-groups"]["peer-group"] = list()
delete_urls = [] # All the delete URLS should be appended to this list
# neigh_name = get_interface_number_from_name(neighbor)
family = kwargs.get('family', None)
if family == "ipv4":
afi_safi_name = "openconfig-bgp-types:IPV4_UNICAST"
else:
afi_safi_name = "openconfig-bgp-types:IPV6_UNICAST"
if 'local_as' in kwargs and removeBGP != 'yes':
bgp_data["openconfig-network-instance:bgp"]["global"]["config"]["as"] = kwargs.get("local_as")
if router_id:
bgp_data["openconfig-network-instance:bgp"]["global"]["config"]["router-id"] = router_id
        if peergroup:
            peer_data = dict()
            peer_data.update({'peer-group-name': peergroup})
            bgp_data["openconfig-network-instance:bgp"]["peer-groups"]["peer-group"].append(peer_data)
            peer_data['config'] = dict()
            peer_data["config"].update({'peer-group-name': peergroup})
config_default_activate = True
config_remote_as = True
neigh_data_sub = dict()
for type1 in config_type_list:
if type1 in sub_list:
if neighbor and not peergroup:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
                if peergroup:
                    peer_data = dict()
                    peer_data.update({'peer-group-name': peergroup})
                    bgp_data["openconfig-network-instance:bgp"]["peer-groups"]["peer-group"].append(peer_data)
                    peer_data['config'] = dict()
                    peer_data["config"].update({'peer-group-name': peergroup})
if 'peergroup' in config_type_list:
# if isinstance(neigh_name, dict):
if activate and config_cmd == "no":
url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
delete_urls.append(url.format("default", neighbor))
if delete_urls:
for url in delete_urls:
# delete_rest(dut, rest_url=url)
if not delete_rest(dut, rest_url=url):
st.error("neighbor is failed")
else:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
if neighbor:
neigh_data1 = dict()
neigh_data1["afi-safis"] = dict()
neigh_data1["afi-safis"]["afi-safi"] = list()
                    neigh_data1_sub = dict()
                    neigh_data1_sub.update({"afi-safi-name": afi_safi_name})
                    neigh_data1_sub["config"] = dict()
                    neigh_data1_sub["config"].update({"afi-safi-name": afi_safi_name, "enabled": True})
                    neigh_data1["afi-safis"]["afi-safi"].append(neigh_data1_sub)
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data1)
if config_remote_as and remote_as:
if interface and not peergroup:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor, "peer-type": remote_as})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
config_remote_as = False
if config_default_activate and (activate or neighbor):
if config_cmd == "":
family = kwargs.get('family', None)
if family == "ipv4":
afi_safi_name = "openconfig-bgp-types:IPV4_UNICAST"
else:
afi_safi_name = "openconfig-bgp-types:IPV6_UNICAST"
                        afi_data = dict()
                        afi_data.update({"afi-safi-name": afi_safi_name})
                        afi_data['config'] = dict()
                        afi_data['config'].update({"afi-safi-name": afi_safi_name, "enabled": True})
                        bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
elif activate and config_cmd == "no":
url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
delete_urls.append(url.format("default", neighbor))
if delete_urls:
for url in delete_urls:
if not delete_rest(dut, rest_url=url.format("default", neighbor)):
st.error("neighbor is failed")
activate = None
if shutdown:
neigh_data["config"].update({"enabled": False})
shutdown = None
elif route_map:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
neigh_data["afi-safis"] = dict()
neigh_data["afi-safis"]["afi-safi"] = list()
neigh_data_sub = dict()
neigh_data_sub.update({"afi-safi-name": "afi-safi-name"})
neigh_data_sub["config"] = dict()
neigh_data_sub["config"].update({"afi-safi-name": "afi-safi-name"})
neigh_data["afi-safis"]["afi-safi"].append(neigh_data_sub)
neigh_data_sub["apply-policy"] = dict()
neigh_data_sub["apply-policy"]["config"] = dict()
neigh_data_sub["apply-policy"]["config"].update({"import-policy": ["route-map"]})
route_map = False
elif filter_list:
neigh_data_sub["openconfig-bgp-ext:filter-list"] = dict()
neigh_data_sub["openconfig-bgp-ext:filter-list"]["config"] = dict()
neigh_data_sub["openconfig-bgp-ext:filter-list"]["config"].update({"import-policy": "filter-list"})
filter_list = None
elif prefix_list:
neigh_data_sub["openconfig-bgp-ext:prefix-list"] = dict()
neigh_data_sub["openconfig-bgp-ext:prefix-list"]["config"] = dict()
neigh_data_sub["openconfig-bgp-ext:prefix-list"]["config"].update({"import-policy": "prefix-list"})
prefix_list = None
elif distribute_list:
neigh_data_sub["openconfig-bgp-ext:prefix-list"] = dict()
neigh_data_sub["openconfig-bgp-ext:prefix-list"]["config"] = dict()
neigh_data_sub["openconfig-bgp-ext:prefix-list"]["config"].update({"import-policy": "prefix-list"})
distribute_list = None
            elif default_originate:
                afi_data = dict()
                afi_data.update({"afi-safi-name": afi_safi_name})
                afi_data['config'] = dict()
                afi_data['config'].update({"afi-safi-name": afi_safi_name, "enabled": True})
                afi_data["ipv4-unicast"] = dict()
                afi_data["ipv4-unicast"]["config"] = dict()
                if 'routeMap' in kwargs:
                    afi_data["ipv4-unicast"]["config"].update({"send-default-route": True, "openconfig-bgp-ext:default-policy-name": kwargs.get("routeMap")})
                else:
                    afi_data["ipv4-unicast"]["config"].update({"send-default-route": True})
                bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
                default_originate = False
elif removePrivateAs:
neigh_data_sub["openconfig-bgp-ext:remove-private-as"] = dict()
neigh_data_sub["openconfig-bgp-ext:remove-private-as"]["config"] = dict()
neigh_data_sub["openconfig-bgp-ext:remove-private-as"]["config"].update({"enabled": True})
removePrivateAs = False
            elif weight:
                afi_data = dict()
                afi_data.update({"afi-safi-name": afi_safi_name})
                afi_data['config'] = dict()
                afi_data['config'].update({"afi-safi-name": afi_safi_name, "enabled": True})
                bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
                neigh_data = dict()
                neigh_data.update({"neighbor-address": neighbor})
                neigh_data["afi-safis"] = dict()
                neigh_data["afi-safis"]["afi-safi"] = list()
                neigh_data_sub = dict()
                neigh_data_sub.update({"afi-safi-name": afi_safi_name})
                neigh_data_sub['config'] = dict()
                neigh_data_sub['config'].update(
                    {"afi-safi-name": afi_safi_name, "enabled": True, "openconfig-bgp-ext:weight": weight})
                neigh_data["afi-safis"]["afi-safi"].append(neigh_data_sub)
                bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
                weight = None
            elif keepalive and holdtime:
                url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
                del_url = url.format("default", neighbor)
                delete_urls.append(del_url)
                if not delete_rest(dut, rest_url=del_url):
                    st.error("Failed to delete neighbor configuration")
                neigh_data["timers"] = dict()
                neigh_data["timers"]["config"] = dict()
                neigh_data["timers"]["config"].update({"hold-time": holdtime, "keepalive-interval": keepalive})
                keepalive = 0
                holdtime = 0
            elif nexthop_self:
                afi_data = dict()
                afi_data.update({"afi-safi-name": afi_safi_name})
                afi_data['config'] = dict()
                afi_data['config'].update({"afi-safi-name": afi_safi_name, "enabled": True})
                bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
                neigh_data = dict()
                neigh_data.update({"neighbor-address": neighbor})
                neigh_data["afi-safis"] = dict()
                neigh_data["afi-safis"]["afi-safi"] = list()
                neigh_data_sub = dict()
                neigh_data_sub.update({"afi-safi-name": afi_safi_name})
                neigh_data_sub["openconfig-bgp-ext:next-hop-self"] = dict()
                neigh_data_sub["openconfig-bgp-ext:next-hop-self"]["config"] = dict()
                neigh_data_sub["openconfig-bgp-ext:next-hop-self"]["config"].update(
                    {"enabled": True, "force": True})
                neigh_data["afi-safis"]["afi-safi"].append(neigh_data_sub)
                bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
                nexthop_self = None
elif pswd:
password = "" if config_cmd == 'no' else password
if password:
neigh_data["openconfig-bgp-ext:auth-password"] = dict()
neigh_data["openconfig-bgp-ext:auth-password"]["config"] = dict()
neigh_data["openconfig-bgp-ext:auth-password"]["config"].update({"password": password})
                else:
                    url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
                    del_url = url.format("default", neighbor)
                    delete_urls.append(del_url)
                    if not delete_rest(dut, rest_url=del_url):
                        st.error("Failed to delete neighbor configuration")
pswd = False
elif update_src:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
update_src = None
elif update_src_intf:
if neighbor:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
                else:
                    url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
                    del_url = url.format("default", neighbor)
                    delete_urls.append(del_url)
                    if not delete_rest(dut, rest_url=del_url):
                        st.error("Failed to delete neighbor configuration")
                if update_src_intf:
                    neigh_data = dict()
                    neigh_data.update({"neighbor-address": neighbor})
                    neigh_data["config"] = dict()
                    neigh_data["config"].update({"neighbor-address": neighbor})
                    neigh_data["transport"] = dict()
                    neigh_data["transport"]["config"] = dict()
                    neigh_data["transport"]["config"].update({"local-address": update_src_intf})
                    bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
update_src_intf = None
elif ebgp_mhop:
if neighbor:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
                else:
                    url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
                    del_url = url.format("default", neighbor)
                    delete_urls.append(del_url)
                    if not delete_rest(dut, rest_url=del_url):
                        st.error("Failed to delete neighbor configuration")
                neigh_data = dict()
                neigh_data.update({"neighbor-address": neighbor})
                neigh_data["config"] = dict()
                neigh_data["config"].update({"neighbor-address": neighbor})
                neigh_data["ebgp-multihop"] = dict()
                neigh_data["ebgp-multihop"]["config"] = dict()
                neigh_data["ebgp-multihop"]["config"].update({"enabled": True, "multihop-ttl": ebgp_mhop})
                bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
ebgp_mhop = None
            elif bfd:
                neigh_data = dict()
                neigh_data.update({"neighbor-address": neighbor})
                neigh_data["config"] = dict()
                neigh_data["config"].update({"neighbor-address": neighbor})
                if remote_as and not interface:
                    neigh_data["config"].update({"peer-type": remote_as})
                neigh_data["openconfig-bfd:enable-bfd"] = dict()
                neigh_data["openconfig-bfd:enable-bfd"]["config"] = dict()
                neigh_data["openconfig-bfd:enable-bfd"]["config"].update({"enabled": True})
                bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
                bfd = False
            elif connect:
                peer_data = dict()
                peer_data.update({'peer-group-name': peergroup})
                peer_data['timers'] = dict()
                peer_data['timers']["config"] = dict()
                peer_data['timers']["config"].update({"connect-retry": connect})
                bgp_data["openconfig-network-instance:bgp"]["peer-groups"]["peer-group"].append(peer_data)
                connect = None
elif type1 == 'fast_external_failover':
bgp_data["openconfig-network-instance:bgp"]["global"]["config"].update(
{"openconfig-bgp-ext:fast-external-failover": True})
elif type1 == 'bgp_bestpath_selection':
bgp_data["openconfig-network-instance:bgp"]["global"]["route-selection-options"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["route-selection-options"]["config"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["route-selection-options"]["config"].update(
{"external-compare-router-id": True})
            elif type1 == 'max_path_ibgp':
                afi_data = dict()
                afi_data.update({"afi-safi-name": afi_safi_name})
                afi_data['config'] = dict()
                afi_data['config'].update({"afi-safi-name": afi_safi_name, "enabled": True})
                afi_data["use-multiple-paths"] = dict()
                afi_data["use-multiple-paths"]["ibgp"] = dict()
                afi_data["use-multiple-paths"]["ibgp"]["config"] = dict()
                afi_data["use-multiple-paths"]["ibgp"]["config"].update({"maximum-paths": max_path_ibgp})
                bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
            elif type1 == 'max_path_ebgp':
                afi_data = dict()
                afi_data.update({"afi-safi-name": afi_safi_name})
                afi_data['config'] = dict()
                afi_data['config'].update({"afi-safi-name": afi_safi_name, "enabled": True})
                if config_cmd == '' or config_cmd == 'yes':
                    afi_data["use-multiple-paths"] = dict()
                    afi_data["use-multiple-paths"]["ebgp"] = dict()
                    afi_data["use-multiple-paths"]["ebgp"]["config"] = dict()
                    afi_data["use-multiple-paths"]["ebgp"]["config"].update({"maximum-paths": max_path_ebgp})
                    bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
else:
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"]["ebgp"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"]["ebgp"][
"config"] = dict()
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"]["ebgp"][
"config"].update({"allow-multiple-as": True})
            elif type1 == 'redist':
                afi_data = dict()
                afi_data.update({"afi-safi-name": afi_safi_name})
                bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
                afi_data['config'] = dict()
                afi_data['config'].update({"afi-safi-name": afi_safi_name})
# my_cmd = '{} redistribute {}'.format(config_cmd, redistribute) # SW defect is there
            elif type1 == 'network':
                afi_data = dict()
                afi_data.update({"afi-safi-name": afi_safi_name})
                afi_data['config'] = dict()
                afi_data['config'].update({"afi-safi-name": afi_safi_name})
                afi_data["openconfig-bgp-ext:network-config"] = dict()
                afi_data["openconfig-bgp-ext:network-config"]["network"] = list()
                obe_data = dict()
                obe_data.update({"prefix": network})
                obe_data["config"] = dict()
                obe_data["config"].update({"prefix": network})
                afi_data["openconfig-bgp-ext:network-config"]["network"].append(obe_data)
                bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
elif type1 == 'import-check':
bgp_data["openconfig-network-instance:bgp"]["global"]["config"].update({"openconfig-bgp-ext:network-import-check": True})
            elif type1 == 'import_vrf':
                afi_data = dict()
                afi_data.update({"afi-safi-name": afi_safi_name})
                bgp_data["openconfig-network-instance:bgp"]["global"]["afi-safis"]["afi-safi"].append(afi_data)
                afi_data['config'] = dict()
                afi_data['config'].update({"afi-safi-name": afi_safi_name})
                afi_data["openconfig-bgp-ext:import-network-instance"] = dict()
                afi_data["openconfig-bgp-ext:import-network-instance"]["config"] = dict()
                afi_data["openconfig-bgp-ext:import-network-instance"]["config"].update({"name": import_vrf_name})
elif type1 == 'multipath-relax':
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"] = dict()
ump_data = dict()
ump_data["ebpg"] = dict()
ump_data["ebpg"]["config"] = dict()
ump_data["ebpg"]["config"].update({"allow-multiple-as": True})
bgp_data["openconfig-network-instance:bgp"]["global"]["use-multiple-paths"].update(ump_data)
elif type1 == 'removeBGP':
st.log("Removing the bgp config from the device")
elif type1 == 'router_id':
st.log("Configuring the router-id on the device")
elif type1 == 'peer_group':
st.log("Configuring the peer_group on the device")
else:
st.log('Invalid BGP config parameter')
if config_cmd == 'no' and 'neighbor' in config_type_list and neighbor and not peergroup:
#if isinstance(neigh_name, dict):
if neighbor and config_cmd == "no":
url = st.get_datastore(dut, "rest_urls")['bgp_del_neighbor_config']
if delete_urls:
for url in delete_urls:
delete_rest(dut, rest_url=url.format("default", neighbor))
if not delete_rest(dut, rest_url=url.format("default", neighbor)):
st.error("neighbor is failed")
else:
neigh_data = dict()
neigh_data.update({"neighbor-address": neighbor})
neigh_data["config"] = dict()
neigh_data["config"].update({"neighbor-address": neighbor})
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data)
if neighbor:
neigh_data1 = dict()
neigh_data1["afi-safis"] = dict()
neigh_data1["afi-safis"]["afi-safi"] = list()
            neigh_data1_sub = dict()
            neigh_data1_sub.update({"afi-safi-name": afi_safi_name})
            neigh_data1_sub["config"] = dict()
            neigh_data1_sub["config"].update({"afi-safi-name": afi_safi_name, "enabled": True})
            neigh_data1["afi-safis"]["afi-safi"].append(neigh_data1_sub)
bgp_data["openconfig-network-instance:bgp"]["neighbors"]["neighbor"].append(neigh_data1)
if vrf_name != 'default' and removeBGP == 'yes':
bgp_data["openconfig-network-instance:bgp"]["global"]["config"].update({"as": 0})
else:
st.log("Unsupported CLI TYPE - {}".format(cli_type))
return False
|
744e96daded519308dfc7a5c3d665ba94cfc6bc5
| 35,428 |
def get_single_notification(session, notification_id):
"""Helper method to extract a single notification from notification table."""
return session.execute(text("""SELECT *
FROM public.notification
WHERE id='{0}'""".format(notification_id))).fetchone()
|
9041a3c91806764379997d79c5355cc84463f57c
| 35,429 |
import re
import flask
from flask import request, session
import models
def register():
"""Registration form handler."""
def fail_validate(msg):
flask.flash(msg, 'danger')
return flask.redirect(flask.url_for('home'))
username = request.form.get('username', '')
if not re.match(r'[A-Za-z0-9_]+$', username):
return fail_validate('Invalid username.')
if models.User.query.get(username):
return fail_validate('User exists.')
user = models.User()
user.username = username
user.password = request.form.get('password')
user.email = request.form.get('email', '')
if models.User.query.filter(models.User.email == user.email).count():
return fail_validate('User exists with that email address.')
models.db.session.add(user)
models.db.session.commit()
session['user'] = user.username
return flask.redirect(flask.url_for('catalog'))
|
940f0478e17a4e02abdd6c8fded7d683a5c93f34
| 35,430 |
def default_reply(event, message):
"""Default function called to reply to bot commands."""
return event.unotice(message)
|
3c83d8abaea0f4c968db25fff51185bb6c32d26e
| 35,431 |
async def list_products():
"""API for listing all the products."""
return await paginate(ProductGinoModel.query)
|
7befee2e20a2b849bbde7ddee521b8b935f6930d
| 35,432 |
def shortest_path_between_atoms(gra, key1, key2):
""" shortest path between a pair of atoms
"""
return shortest_path_between_groups(gra, [key1], [key2])
|
0af0439794db7d6a2025fe28782ceff2da6575f3
| 35,433 |
import copy
def _safe_divide(num, denom, replace=0):
"""Safe division when elements in the denominator might be zeros.
Returns the division of the numerator by the denominator, but replaces
results which have a zero in the denominator by a specified value. The
default is to replace bad divisions with zeros.
"""
#consider moving to a utility module; copy over tests from colorunittests.py
num = _to_npa(num)
denom = _to_npa(denom)
assert(num.shape == denom.shape)
zero_flag = denom == 0.0
if zero_flag.any():
denom_copy = copy.copy(denom)
denom_copy[zero_flag] = 1.0
div = num / denom_copy
div[zero_flag] = replace
else:
div = num / denom
return div
|
14a1f42104b98dccf865de7e6c3e17892f8aeb65
| 35,434 |
def making_change(amt: int, coins: list) -> int:
"""Iterative implementation of the making change algorithm.
:param amt (int) : Amount, in cents, to be made into change.
:param coins (list) : List of coin denominations
:return (int) : Number of different combinations of change.
"""
# calc[i] represents the number of ways to get to amount i
calc = [0] * (amt + 1)
# 1 way to get zero
calc[0] = 1
# Pick all coins one by one and update calc[] values after the
# index greater than or equal to the value of the picked coin
for coin_val in coins:
for j in range(coin_val, amt + 1):
calc[j] += calc[j - coin_val]
return calc[amt]
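# A quick sanity check (amount and denominations invented for illustration):
# 10 cents from {1, 5, 10} can be made 4 ways -- ten pennies, five pennies
# plus a nickel, two nickels, or one dime.
assert making_change(10, [1, 5, 10]) == 4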
|
188496f5db4252fa27f153d0a0379031847c669d
| 35,435 |
from io import BytesIO, StringIO
from pdfminer import converter, layout, pdfinterp, pdfparser  # legacy pdfminer API
def pdf_to_text(pdf):
"""Return extracted text from PDF.
Warning: This function can be slow... up to 300ms per page
This function does not perform optical character recognition.
Args:
pdf: bytestring of PDF file
Returns:
str of text extracted from `pdf` contents.
"""
    # make input (bytes) and output (text) buffers
    in_buffer = BytesIO(pdf)
    out_buffer = StringIO()
# configure pdf parser
parser = pdfparser.PDFParser(in_buffer)
doc = pdfparser.PDFDocument()
parser.set_document(doc)
doc.set_parser(parser)
doc.initialize(password='')
rsrcmgr = pdfinterp.PDFResourceManager()
laparams = layout.LAParams()
# convert pdf to text
device = converter.TextConverter(
rsrcmgr, outfp=out_buffer, codec='utf-8', laparams=laparams)
interpreter = pdfinterp.PDFPageInterpreter(rsrcmgr, device)
for page in doc.get_pages():
interpreter.process_page(page)
return out_buffer.getvalue()
|
46f1b186a73a929f3053b35f94428e30804ab907
| 35,436 |
import numpy as np
def pendulum_sunny(p):
    """sunny constraint for gravity Pendulum"""
    return np.abs(p[:, 1]) < 0.5
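# A minimal usage sketch (sample points invented): only the first point, with
# |p[:, 1]| = 0.2 < 0.5, satisfies the constraint.
# pendulum_sunny(np.array([[0.0, 0.2], [0.0, 0.9]]))  # -> array([ True, False])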
|
85fa7f94e803aac0d291030837f048d933283035
| 35,437 |
def PGetSkyModel (inUVSelfCal):
""" Return the member sky model
returns ImageMosaic
inUVSelfCal = Python UVSelfCal object
"""
################################################################
# Checks
if not PIsA(inUVSelfCal):
raise TypeError("inUVSelfCal MUST be a Python Obit UVSelfCal")
#
out = SkyModel.SkyModel("None")
out.me = Obit.UVSelfCalGetSkyModel(inUVSelfCal.me)
return out
# end PGetSkyModel
|
d1dc418e46074958684c04b7431ee84ec0260f4b
| 35,438 |
def CGaussFilter_DImage_getGaussianFuncValue(dX, dY, dSigma):
"""CGaussFilter_DImage_getGaussianFuncValue(dX, dY, dSigma) -> double"""
return _ImageFilters.CGaussFilter_DImage_getGaussianFuncValue(dX, dY, dSigma)
|
eeaf4dd66c795b6803efae2a386301c04e3e05b6
| 35,439 |
import credstash
def get_credstash_config(key):
"""Retrieves a single secret from AWS via credstash, assumes the
string returned is a list of newline-separated export statements
export FOO='some-hush-hush-info-here'
export BAR='some-other-secret-here'
and parses these lines into a configuration dictionary.
Args:
key: A string naming the credstash secret to retrieve, which
can optionally include a version number.
Returns:
A dict with keys for each of of the environment variables
described in the secret's contents. The dict keys
will be returned in upper case, as a convention.
{'FOO': 'some-hush-hush-info-here'
'BAR': 'some-other-secret-here'}
Blank lines, lines that do not start with 'export ', and lines
that do not contain '=' are ignored.
The returned dictionary might be empty, so use safe methods
to .get() the keys you were expecting to find in it.
Raises:
credstash.ItemNotFound: provided key not found by credstash
SyntaxError: parsing of secret failed
ValueError: parsing of secret failed
"""
try:
raw_secrets = credstash.getSecret(key)
except credstash.ItemNotFound:
raise
# assume raw secret is a multiline string of export statements e.g.
# export FOO=bar\nexport BAZ=goo
try:
# get the interesting, non-blank, parsable-looking lines into a list
parsed_secrets = [
line.replace("export ", "")
for line in raw_secrets.split("\n")
if line.startswith("export ") and "=" in line
]
# parse those KEY=value lines into a dict, splitting only on first '='
config_secrets = {}
config_secrets = dict(line.split("=", 1) for line in parsed_secrets)
# make sure all keys are uppercase
upper_config_secrets = {}
for k, val in config_secrets.items():
upper_config_secrets[k.upper()] = val
except (SyntaxError, ValueError):
raise
return upper_config_secrets
|
afa2a858098860edc6179a046302119eee305b49
| 35,440 |
from typing import List
def aggregate_stats(stats: List[deephol_stat_pb2.ProofStat]
) -> deephol_stat_pb2.ProofAggregateStat:
"""Merge a list of proof log statistics.
Args:
stats: List of individual proof log statistics.
Returns:
Aggregated proof statistics.
"""
result = deephol_stat_pb2.ProofAggregateStat()
for stat in stats:
merge_stat(result, stat)
return result
|
f15be64b3a2d8fb811aeeea2ccd57f5baa092e63
| 35,441 |
from typing import Optional
from typing import Mapping
from typing import Any
from typing import List
from pathlib import Path
def get_requires_for_build_wheel(
config_settings: Optional[Mapping[str, Any]] = None
) -> List[str]:
"""
Returns an additional list of requirements for building, as PEP508 strings,
above and beyond those specified in the pyproject.toml file.
When C-extension build is needed, setuptools should be required, otherwise
just return an empty list.
"""
meta = Metadata(Path("pyproject.toml"))
if meta.build:
return ["setuptools>=40.8.0"]
else:
return []
|
3681a08ec22887baa4aafac592059512442913a3
| 35,443 |
import pandas as pd
import pandas_ta as ta
def fisher(high_vals: pd.Series, low_vals: pd.Series, length: int = 14) -> pd.DataFrame:
"""Fisher Transform
Parameters
----------
high_vals: pd.Series
High values
low_vals: pd.Series
Low values
length: int
Length for indicator window
Returns
----------
df_ta: pd.DataFrame
Dataframe of technical indicator
"""
# Daily
return pd.DataFrame(ta.fisher(high=high_vals, low=low_vals, length=length).dropna())
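# A hedged usage sketch (dataframe and column names invented): given an OHLC
# dataframe `df`, this returns the Fisher Transform and its signal line.
# df_fisher = fisher(df["High"], df["Low"], length=14)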
|
9d6c8ff0d76c6121f418768503b68e7b596dcc89
| 35,444 |
def is_extension_supported(request, extension_alias):
"""Check if a specified extension is supported.
:param request: django request object
:param extension_alias: neutron extension alias
"""
extensions = list_extensions(request)
    for extension in extensions:
        if extension['alias'] == extension_alias:
            return True
    return False
|
f8886d992724ef4ad0aa9c9c9ac25361b489d7dd
| 35,445 |
import numpy as np
def _linear_wcs_fit(params, lon, lat, x, y, w_obj):  # pragma: no cover
"""
Objective function for fitting linear terms.
Parameters
----------
params : array
6 element array. First 4 elements are PC matrix, last 2 are CRPIX.
lon, lat: array
Sky coordinates.
x, y: array
Pixel coordinates
w_obj: `~astropy.wcs.WCS`
WCS object
"""
cd = params[0:4]
crpix = params[4:6]
w_obj.wcs.cd = ((cd[0], cd[1]), (cd[2], cd[3]))
w_obj.wcs.crpix = crpix
lon2, lat2 = w_obj.wcs_pix2world(x, y, 0)
resids = np.concatenate((lon-lon2, lat-lat2))
resids[resids > 180] = 360 - resids[resids > 180]
resids[resids < -180] = 360 + resids[resids < -180]
return resids
|
ac20744d7c52112290711a02a5c14a64d9525108
| 35,446 |
import numpy as np
from imblearn.over_sampling import RandomOverSampler
def resample(feature_index, labels, balance='auto'):
"""use oversampling to balance class, after split of training set."""
ros = RandomOverSampler(ratio=balance)
feature_index = np.array(feature_index).reshape(-1, 1)
resampled_index, _ = ros.fit_sample(feature_index, labels)
resampled_index = [i for nested in resampled_index for i in nested]
return resampled_index
|
4a061fe44da53caf6263a66042398562ac8149f0
| 35,447 |
from typing import Callable
def dal_resolver(ctx, request_path):
"""
This function resolves a dal method call to its underlying
service
"""
service_or_method = None
for (path, service_or_method) in resolve(ctx, request_path):
if not (
service_or_method and isinstance(service_or_method, (Callable, Service))
):
raise DalMethodError(path=path)
if not service_or_method:
raise DalMethodError(request_path)
return service_or_method
|
3d5fb3bd019988ec43ca3d4514c8df2f7935287d
| 35,448 |
import re
import operator
import itertools
from collections import defaultdict
from math import gcd
def guess_key_size(ciphertext, max_key_size=40):
"""Given sentence xored with short key, guess key size
From: http://trustedsignal.blogspot.com/2015/06/xord-play-normalized-hamming-distance.html
Args:
ciphertext(string)
max_key_size(int)
Returns:
        list: sorted list of tuples (key_size, probability);
            note that the most probable key size does not necessarily have the largest probability
"""
    if not max_key_size:
        max_key_size = len(ciphertext) // 4
result = {}
for key_size in range(1, max_key_size):
blocks = re.findall('.' * key_size, ciphertext, re.DOTALL)
if len(blocks) < 2:
break
diff = i = 0
while i < len(blocks) - 1:
if len(blocks[i]) != len(blocks[i + 1]): # not full-length block
break
diff += hamming_distance(blocks[i], blocks[i + 1])
i += 1
result[key_size] = diff / float(i) # average
result[key_size] /= float(key_size) # normalize
result = sorted(list(result.items()), key=operator.itemgetter(1))
# now part from given link, case one
# gcd12 = gcd(result[0][0], result[1][0])
# gcd13 = gcd(result[0][0], result[2][0])
# gcd23 = gcd(result[1][0], result[2][0])
# print gcd12, gcd13, gcd23
# if (gcd12 != 1) and (gcd12 in [x[0] for x in result[:5]]):
# if (gcd12 == gcd13 and gcd12 == gcd23) or (gcd12 == result[0][0] or gcd12 == result[1][0]):
# #remove key_size == gcd12 from result list and add it to the beginning
# for x in result:
# if x[0] == gcd12:
# result.remove(x)
# break
# result[0] == (gcd12, 1.0)
# from link, case two; yep, black magic it is
gcd_frequencies = defaultdict(lambda: 0)
for gcd_pairs in itertools.combinations(result[:10], 2):
gcd_tmp = gcd(gcd_pairs[0][0], gcd_pairs[1][0])
gcd_frequencies[gcd_tmp] += 1
gcd_frequencies = sorted(list(gcd_frequencies.items()), key=operator.itemgetter(1), reverse=True)
key_sizes = [x[0] for x in result[:10]]
distances = [x[1] for x in result[:10]]
for guessed_most_probable_key_size in gcd_frequencies[:5]:
if guessed_most_probable_key_size[0] != 1 and guessed_most_probable_key_size[1] != 0 and \
guessed_most_probable_key_size[0] in key_sizes:
gmks_position = result[key_sizes.index(guessed_most_probable_key_size[0])]
if gmks_position[1] < max(distances):
result.remove(gmks_position)
result = [gmks_position] + result
log.info("Guessed key size: {}".format(result))
return result
|
99977ba81ebcb2f2e581164753f7ab363a9fd2e9
| 35,449 |
def AddWorkerpoolCreateArgs(parser):
"""Set up all the argparse flags for creating a workerpool.
Args:
parser: An argparse.ArgumentParser-like object.
Returns:
The parser argument with workerpool flags added in.
"""
return AddWorkerpoolArgs(parser, update=False)
|
db86e56ee95feb4b71cf2e209820e85e2985c688
| 35,450 |
def table_dispatch(kind, table, body):
"""Call body with table[kind] if it exists. Raise an error otherwise."""
if kind in table:
return body(table[kind])
else:
raise BaseException, "don't know how to handle a histogram of kind %s" % kind
|
18d827baeabbca8d27848ea87a067328fe82d16a
| 35,451 |
import tensorflow as tf
def compute_dot_sim_matrix(node_features):
"""Compute edge scores with dot product."""
sim = tf.matmul(node_features, tf.transpose(node_features, perm=[1, 0]))
return sim
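# A minimal usage sketch (shapes invented): an (N, D) feature matrix yields an
# (N, N) matrix of pairwise dot-product similarities.
# sim = compute_dot_sim_matrix(tf.random.normal([8, 16]))  # shape (8, 8)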
|
7eb1e67a01f29d064f9e21ffdbdaf516b42b35a4
| 35,452 |
import pickle
def load_param_file(filename):
"""
Loads a saved parameter dictionary from filename.
"""
    with open(filename, 'rb') as f:
        return pickle.load(f)
|
1a6de4fba9f55bf6cbd108f4613f3778094ab410
| 35,453 |
def strip_block_comments(text: str) -> str:
""" Remove any block-style comments from a text. """
return strip_comments(text, [(COMMENT_BLOCK_PATTERN, None)])
|
4389bb137aa3fde7a033113646327920118911ca
| 35,455 |
import numpy as np
import torch
def optim_solver(grad, diff, radius, device, gamma=2):
"""
Solver for the optimization problem presented in Proposition 1 in
https://arxiv.org/abs/2002.12718
"""
lamda, mhlnbs_dis = compute_mahalanobis_distance(grad, diff, radius, device, gamma)
lamda_lower_limit = range_lamda_lower(grad).detach().cpu().numpy()
nu_upper_limit = range_nu_upper(grad, mhlnbs_dis, radius, gamma).detach().cpu().numpy()
#num of values of lamda and nu samples in the allowed range
num_rand_samples = 40
final_lamda = torch.zeros((grad.shape[0],1))
#Solve optim for each example in the batch
for idx in range(lamda.shape[0]):
#Optim corresponding to mahalanobis dis < radius
if lamda[idx] == 1:
min_left = np.inf
best_lam = 0
for k in range(num_rand_samples):
val = np.random.uniform(low = lamda_lower_limit[idx], high = 0)
left_val = check_right_part1(val, grad[idx], diff[idx], radius, device)
if left_val < min_left:
min_left = left_val
best_lam = val
final_lamda[idx] = best_lam
#Optim corresponding to mahalanobis dis > gamma * radius
elif lamda[idx] == 2:
min_left = np.inf
best_lam = np.inf
for k in range(num_rand_samples):
val = np.random.uniform(low = 0, high = nu_upper_limit[idx])
left_val = check_right_part2(val, grad[idx], diff[idx], radius, device, gamma)
if left_val < min_left:
min_left = left_val
best_lam = val
final_lamda[idx] = 1.0/best_lam
else:
final_lamda[idx] = 0
final_lamda = final_lamda.to(device)
for j in range(diff.shape[0]):
diff[j,:] = diff[j,:]/(1+final_lamda[j]*grad[j,:])
return diff
|
20971bea92ca13caa4df6204e24750c616fd4805
| 35,456 |
def send_sms(domain, contact, phone_number, text, metadata=None, logged_subevent=None):
"""
Sends an outbound SMS. Returns false if it fails.
"""
if phone_number is None:
return False
if isinstance(phone_number, int):
phone_number = str(phone_number)
phone_number = clean_phone_number(phone_number)
msg = get_sms_class()(
domain=domain,
phone_number=phone_number,
direction=OUTGOING,
date=get_utcnow(),
backend_id=None,
location_id=get_location_id_by_contact(domain, contact),
        text=text
)
if contact:
msg.couch_recipient = contact.get_id
msg.couch_recipient_doc_type = contact.doc_type
if domain and contact and is_commcarecase(contact):
backend_name = contact.get_case_property('contact_backend_id')
backend_name = backend_name.strip() if isinstance(backend_name, str) else ''
if backend_name:
try:
backend = SQLMobileBackend.load_by_name(SQLMobileBackend.SMS, domain, backend_name)
except BadSMSConfigException as e:
if logged_subevent:
logged_subevent.error(MessagingEvent.ERROR_GATEWAY_NOT_FOUND,
additional_error_text=str(e))
return False
msg.backend_id = backend.couch_id
add_msg_tags(msg, metadata)
return queue_outgoing_sms(msg)
|
751111ce8db4d07d1021b30cd295b7062b6209a4
| 35,457 |
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import make_axes_locatable
def u_diff(Exact, U_pred, x, t, nu, beta, rho, seed, layers, N_f, L, source, lr, u0_str, system, path, relative_error=False):
"""Visualize abs(u_pred - u_exact)."""
fig = plt.figure(figsize=(9, 5))
ax = fig.add_subplot(111)
if relative_error:
h = ax.imshow(np.abs(Exact.T - U_pred.T)/np.abs(Exact.T), interpolation='nearest', cmap='binary',
extent=[t.min(), t.max(), x.min(), x.max()],
origin='lower', aspect='auto')
else:
h = ax.imshow(np.abs(Exact.T - U_pred.T), interpolation='nearest', cmap='binary',
extent=[t.min(), t.max(), x.min(), x.max()],
origin='lower', aspect='auto')
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.10)
cbar = fig.colorbar(h, cax=cax)
cbar.ax.tick_params(labelsize=15)
line = np.linspace(x.min(), x.max(), 2)[:,None]
ax.set_xlabel('t', fontweight='bold', size=30)
ax.set_ylabel('x', fontweight='bold', size=30)
ax.legend(
loc='upper center',
bbox_to_anchor=(0.9, -0.05),
ncol=5,
frameon=False,
prop={'size': 15}
)
ax.tick_params(labelsize=15)
plt.savefig(f"{path}/udiff_{system}_nu{nu}_beta{beta}_rho{rho}_Nf{N_f}_{layers}_L{L}_seed{seed}_source{source}_{u0_str}_lr{lr}.pdf", bbox_inches='tight')
return None
|
cd08143e645da60be284520af4db8f6f1eef4be8
| 35,458 |
def boundary_condition():
"""
Factory associated with DirichletBC.
"""
return DirichletBC()
|
d547a86687bab9f2c87ca28f0f1eb5429f4892d7
| 35,460 |
def upgrade_available():
"""
Detect if a new kernel version is available in the repositories.
Returns True if a new kernel is available, False otherwise.
CLI Example:
.. code-block:: bash
salt '*' kernelpkg.upgrade_available
"""
return _LooseVersion(latest_available()) > _LooseVersion(latest_installed())
|
95d7f6558d060f64066a56b38336981a515e1dba
| 35,461 |
import numpy as np
def ss_error(observed_values, estimated_values):
"""Sum of squared error function."""
sse = np.sum((observed_values - estimated_values) ** 2)
return sse
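# A quick sanity check (values invented): residuals of -0.5 and 0.5 give
# 0.5**2 + 0.5**2 = 0.5.
# ss_error(np.array([1.0, 2.0]), np.array([1.5, 1.5]))  # -> 0.5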
|
f049643ac58e3cac2fc436b75508976121adaff7
| 35,463 |
def _generate_gherkin_feature_files(
gherkin_templates: list, properties_list: list[dict]
) -> list[TemplateOutputFile]:
"""
Compile templates with variable properties information.
Args:
gherkin_templates: templates to generate against. (Should only be one template)
properties_list: a list of template property dictionaries
Returns:
list of template information dictionaries
"""
def generate_file(properties: dict) -> TemplateOutputFile:
feature_name = properties.get("feature").get("name")
generated_file = generate_template(gherkin_template, properties)
generated_file.file_name = _create_gherkin_feature_file_name(feature_name)
generated_file.overwrite = False
return generated_file
# This plugin produces only gherkin feature file and so it only needs one template
gherkin_template = None
if len(gherkin_templates) != 1:
raise GenerateGherkinException(
f"Unexpected number of templates loaded {len(gherkin_templates)}, \
expecting only gherkin feature file template. Loaded templates: {gherkin_templates}"
)
else:
gherkin_template = gherkin_templates[0]
return list(map(generate_file, properties_list))
|
37c833b85df5c9ed01cab3a9f9f38304808c2ab3
| 35,464 |
import base64
def _get_base64(data: str) -> str:
"""Base 64 encodes data."""
ebytes = base64.b64encode(data.encode("utf-8"))
estring = str(ebytes, "utf-8")
return estring
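# A quick sanity check: base64 of the ASCII string "hello".
assert _get_base64("hello") == "aGVsbG8="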
|
a7bd3080dba077077d96602eb35142db32b003de
| 35,466 |
import datetime
def parse_datetime(value):
"""Parses a string(ISO_8601) and return a datetime.datetime base UTC,
or parse datetime.datetime base other timezone and return a datetime.datetime base UTC timezone
"""
if isinstance(value, datetime.datetime):
if not value.tzinfo:
value = value.replace(tzinfo=LocalTimeZone)
return value.astimezone(UTC)
match = datetime_re.match(value)
if match:
kw = match.groupdict()
if kw['microsecond']:
kw['microsecond'] = kw['microsecond'].ljust(6, '0')
tzinfo = kw.pop('tzinfo')
tz = UTC
offset = 0
if tzinfo == 'Z':
offset = 0
elif tzinfo is not None:
offset_mins = int(tzinfo[-2:]) if len(tzinfo) > 3 else 0
offset = 60 * int(tzinfo[1:3]) + offset_mins
if tzinfo[0] == '-':
offset = -offset
else:
tz = LocalTimeZone
kw = {k: int(v) for k, v in kw.items() if v is not None}
kw['tzinfo'] = tz
dt = datetime.datetime(**kw)
        dt -= datetime.timedelta(minutes=offset)  # shift the wall-clock time back to UTC
return dt.astimezone(UTC)
|
271556ab449bef15461a8d5086187724c9492d2f
| 35,467 |
def _dev_http_archive_impl(ctx):
"""Implementation of the http_archive rule."""
if not ctx.attr.url and not ctx.attr.urls:
fail("At least one of url and urls must be provided")
if ctx.attr.build_file and ctx.attr.build_file_content:
fail("Only one of build_file and build_file_content can be provided.")
all_urls = []
if ctx.attr.urls:
all_urls = ctx.attr.urls
if ctx.attr.url:
all_urls = [ctx.attr.url] + all_urls
env_key = "BAZEL_" + ctx.name.upper() + "_PATH"
if env_key in ctx.os.environ:
repo_path = ctx.os.environ[env_key]
script_path = ctx.path(Label("@com_intel_plaidml//bzl:dev_repo.py"))
result = ctx.execute([
ctx.which("python"),
script_path,
repo_path,
ctx.path("."),
], quiet = False)
if not result.return_code == 0:
fail("dev_http_archive failure: %s\n" % result.stderr)
workspace_and_buildfile(ctx)
return update_attrs(ctx.attr, _dev_http_archive_attrs.keys(), {})
download_info = ctx.download_and_extract(
all_urls,
"",
ctx.attr.sha256,
ctx.attr.type,
ctx.attr.strip_prefix,
)
workspace_and_buildfile(ctx)
return update_attrs(ctx.attr, _dev_http_archive_attrs.keys(), {"sha256": download_info.sha256})
|
53d363db0af163965bc4b3b8331cf89bf91a1518
| 35,468 |
def setSortGroups(sortGroups=None):
"""
Return the sorting groups, either user defined or from the default list
"""
if sortGroups is None: # Default groups
return [('-inf', '+inf'), ('-inf', 100), (101, '+inf')]
else:
sortGroups.insert(0, ('-inf', '+inf'))
return sortGroups
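# A minimal usage sketch (group bounds invented): the catch-all group is always
# prepended when custom groups are supplied.
# setSortGroups([(0, 50)])  # -> [('-inf', '+inf'), (0, 50)]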
|
f2e8cff00fe70627e81dcc0ce576f56e4d289228
| 35,469 |
def attach_clipped_regions_to_surface(surface, clipped, center):
"""Check the connectivty of a clipped surface, and attach all sections which are not
closest to the center of the clipping plane.
Args:
surface (vtkPolyData):
clipped (vtkPolyData): The clipped segments of the surface.
center (list): The center of the clipping point
Returns:
surface (vtkPolyData): The surface where only one segment has been removed.
"""
connectivity = vtk_compute_connectivity(clipped, mode="All")
if connectivity.GetNumberOfPoints() == 0:
return surface
region_id = get_point_data_array("RegionId", connectivity)
distances = []
regions = []
for i in range(int(region_id.max() + 1)):
regions.append(vtk_compute_threshold(connectivity, "RegionId", lower=i - 0.1, upper=i + 0.1, source=0))
locator = get_vtk_point_locator(regions[-1])
region_point = regions[-1].GetPoint(locator.FindClosestPoint(center))
distances.append(get_distance(region_point, center))
# Remove the region with the closest distance
regions.pop(distances.index(min(distances)))
# Add the other regions back to the surface
surface = vtk_merge_polydata(regions + [surface])
surface = vtk_clean_polydata(surface)
surface = vtk_triangulate_surface(surface)
return surface
|
0ddb832a8fa29c7167301d8dde116734e59268da
| 35,470 |
def ubuntu_spec(**kwargs):
"""Ubuntu specs."""
# Setup vars from kwargs
builder = kwargs['data']['builder']
builder_spec = kwargs['data']['builder_spec']
distro = kwargs['data']['distro']
version = kwargs['data']['version']
bootstrap_cfg = 'preseed.cfg'
# https://github.com/mrlesmithjr/packer-builder/issues/83
if builder == 'qemu':
boot_wait = '5s'
else:
boot_wait = '30s'
builder_spec.update(
{
'boot_command': [
'<enter><wait><f6><esc>',
'<bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs>',
'<bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs>',
'<bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs>',
'<bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs>',
'<bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs>',
'<bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs><bs>',
'<bs><bs><bs><bs><bs><bs>',
'<wait>',
'/install/vmlinuz',
'<wait>',
' initrd=/install/initrd.gz',
'<wait>',
' auto=true',
'<wait>',
' priority=critical',
'<wait>',
' url=http://{{ .HTTPIP }}:{{ .HTTPPort }}/'f'{distro}-{version}-{bootstrap_cfg}', # noqa: E501
'<wait>',
'<enter>'
],
'boot_wait': f'{boot_wait}',
'shutdown_command': 'sudo /sbin/halt -h -p'
}
)
return bootstrap_cfg, builder_spec
|
fca2605c5b10f86519ef5ca952ab340e1f5560f2
| 35,471 |
import numpy as np
def ds_as_cds(dataset):
"""
Converts Vega dataset into Bokeh ColumnDataSource data
"""
if len(dataset) == 0:
return {}
data = {k: [] for k, v in dataset[0].items()}
for item in dataset:
for k, v in item.items():
data[k].append(v)
data = {k: np.asarray(v) for k, v in data.items()}
return data
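# A quick sanity check (records invented): column-orients a list of records.
# ds_as_cds([{'x': 1, 'y': 2}, {'x': 3, 'y': 4}])
# -> {'x': array([1, 3]), 'y': array([2, 4])}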
|
41db6678bedfd23bbf43aaf281d9d7c0afd87ee6
| 35,472 |
from M2Crypto import BIO, EVP, util
def genPubKey(privkey):
    """ Generate the PEM-encoded public key from a PEM private key.
    """
    bio = BIO.MemoryBuffer(privkey)
    key = EVP.load_key_bio(bio, util.no_passphrase_callback)
    return key.get_rsa().as_pem()
|
d750b136c2d79ff83261d3ac3ae104cc72d30428
| 35,473 |
def CI_calc(mean, SE, CV=1.96):
"""
Calculate confidence interval.
:param mean: mean of data
:type mean: float
:param SE: standard error of data
:type SE: float
:param CV: critical value
:type CV:float
:return: confidence interval as tuple
"""
try:
CI_down = mean - CV * SE
CI_up = mean + CV * SE
return (CI_down, CI_up)
except Exception:
return ("None", "None")
|
be548e1ac1313f9e25428f4925399be27949f8ef
| 35,474 |
def bivecvec_invariants(q):
"""Calculates rotation-invariant attributes of a (vector, trivector) quantity.
Returns a 2D output: the norm of the vector and the trivector.
"""
result = [custom_norm(q[..., :3]), q[..., 3:4]]
return tf.concat(result, axis=-1)
|
43beda8a485ad56c5e7ba764dbd15757edd9653b
| 35,475 |
from typing import Optional
from typing import Sequence
def resolve_margins(margins: Optional[Sequence[float]]) -> Tuple4f:
""" Returns the box margins in CSS like order: top, right, bottom, left.
"""
if margins is None:
return 0, 0, 0, 0
count = len(margins)
if count == 4: # CSS: top, right, bottom, left
return margins[0], margins[1], margins[2], margins[3]
elif count == 3: # CSS: top, right, bottom, left=right
return margins[0], margins[1], margins[2], margins[1]
elif count == 2: # CSS: top, right, bottom=top, left=right
return margins[0], margins[1], margins[0], margins[1]
    elif count == 1:  # CSS: top, right=top, bottom=top, left=top
        return margins[0], margins[0], margins[0], margins[0]
    raise ValueError("expected 1 to 4 margin values, got %d" % count)
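# A few sanity checks (values invented), mirroring CSS shorthand expansion:
# resolve_margins((5,))         # -> (5, 5, 5, 5)
# resolve_margins((5, 10))      # -> (5, 10, 5, 10)
# resolve_margins((5, 10, 15))  # -> (5, 10, 15, 10)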
|
9411f7064b78247c73a6f7701fe916ce9a78b85a
| 35,477 |
def check_cookie_auth(api_key, required_scopes):
"""
Although OpenAPI explicitly supports JWT - we don't enforce this - simply use it as a quick way to
mock the behaviour of a more advanced system
:param api_key:
:param required_scopes:
:return: the decoded security token
"""
try:
return jwt.decode(api_key, __JWT_SECRET, algorithms=[__JWT_ALGORITHM])
    except Exception:
raise Unauthorized()
|
29c502a4895b7dfcbe1002bd6d6c7739a3718b96
| 35,478 |
from typing import Any
async def validate_input(
hass: core.HomeAssistant, data: dict[str, Any]
) -> dict[str, Any]:
"""Validate the user input allows us to connect."""
session = async_get_clientsession(hass, verify_ssl=data[CONF_VERIFY_SSL])
protocol = "https" if data[CONF_SSL] else "http"
url = f"{protocol}://{data[CONF_HOST]}"
sma = pysma.SMA(session, url, data[CONF_PASSWORD], group=data[CONF_GROUP])
# new_session raises SmaAuthenticationException on failure
await sma.new_session()
device_info = await sma.device_info()
await sma.close_session()
return device_info
|
23687106fb590105dc412d1a1d90ea6958be0828
| 35,479 |
import scipy as sp
import scipy.ndimage  # ensure the ndimage submodule is loaded for sp.ndimage
def resize_cube(cube, shape):
"""Return resized cube with the define shape"""
zoom = [float(x) / y for x, y in zip(shape, cube.shape)]
resized = sp.ndimage.zoom(cube, zoom)
assert resized.shape == shape
return resized
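# A quick sanity check (shapes invented): downsample a 10^3 cube to 5^3.
# resize_cube(np.zeros((10, 10, 10)), (5, 5, 5)).shape  # -> (5, 5, 5)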
|
491272f81b02f1b92c0c4f886b956f3b8e48efc2
| 35,480 |
import torch
from torch.autograd import Variable  # legacy (pre-0.4) PyTorch API, needed for `volatile`
def load_image_from_cifar(image_id):
"""This is to load the image from CIFAR numpy and pre-process.
:param image_id: An integer as the image id to load from CIFAR.
:return img: A PyTorch Tensor.
"""
image = CIFAR[image_id]
image = image / 255.0 # Normalize
image = torch.from_numpy(image)
image = image.unsqueeze(0) # Change from 3D to 4D
image = image.float()
image = Variable(image, volatile=True)
return image
|
d235df286f0629312ff39967845b0b744eb8d531
| 35,481 |
def valueToCharacter(value):
"""
Returns the respective character for a value. Returns 'highest' character if no match is found
Args:
value ([int]): Value that should be mapped to a character
Returns:
[char]: Respective character for the given value
"""
for bar_threshold in BAR_CHARACTERS:
        if value < bar_threshold:
return BAR_CHARACTERS[bar_threshold]
return BAR_CHARACTERS[100]
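# A minimal usage sketch assuming a hypothetical threshold map such as
# BAR_CHARACTERS = {25: '_', 50: '.', 75: 'o', 100: 'O'}:
# valueToCharacter(30)   # -> '.' (first threshold with 30 < threshold)
# valueToCharacter(150)  # -> 'O' (falls through to BAR_CHARACTERS[100])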
|
c5df1d0185f0ab775eda8f573253d399824cbb98
| 35,482 |
import gc
try:
    import cPickle  # Python 2
except ImportError:
    import pickle as cPickle  # Python 3 fallback
def pickle_loads(inbox):
"""
Deserializes the first element of the input using the pickle protocol.
"""
gc.disable()
obj = cPickle.loads(inbox[0])
gc.enable()
return obj
|
38ce6a33b3313d97ffa021dde93d68e0387d1f69
| 35,483 |
def NOT_TENSOR_FILTER(arg_value):
"""Only keeps a value if it is not a Tensor or SparseTensor."""
return not arg_value.is_tensor and not arg_value.is_sparse_tensor
|
14eb28c1824f58bd7ef6ad1da96922891114fe5a
| 35,484 |
import numpy as np
def rod_3D(x, gm=None, median=None, scaler1=None, scaler2=None):
"""Find ROD scores for 3D Data. note that gm, scaler1 and scaler2 will be
returned "as they are" and without being changed if the model has been fit
already.
Parameters
----------
x : array-like, 3D data points.
gm: list (default=None), the geometric median
median: float (default=None), MAD median
scaler1: obj (default=None), MinMaxScaler of Angles group 1
scaler2: obj (default=None), MinMaxScaler of Angles group 2
Returns
-------
decision_scores, gm, scaler1, scaler2
"""
# find the geometric median if it is not already fit
gm = geometric_median(x) if gm is None else gm
# find its norm and center data around it
norm_ = np.linalg.norm(gm)
_x = x - gm
# calculate the scaled angles between the geometric median and each data point vector
v_norm = np.linalg.norm(_x, axis=1)
gammas, scaler1, scaler2 = scale_angles(
np.arccos(np.clip(np.dot(_x, gm) / (v_norm * norm_), -1, 1)),
scaler1=scaler1,
scaler2=scaler2)
# apply the ROD main equation to find the rotation costs
costs = np.power(v_norm, 3) * np.cos(gammas) * np.square(np.sin(gammas))
# apply MAD to calculate the decision scores
decision_scores, median = mad(costs, median=median)
return decision_scores, list(gm), median, scaler1, scaler2
|
df3347689aef695ff9ab1a8b8853310f14ff9828
| 35,485 |
def check_if_neighbors_match(src_neighbor, trg_neighbor):
"""Check if any source and target neighbors match and return matches
Args:
src_neighbor (list): Source Neighbor List
trg_neighbor (list): Target Neighbor List
Returns:
list: Matching of neighbors.
"""
matching = {}
for current_neighbor_index in range(len(src_neighbor)):
# Looking for matches
if int(trg_neighbor[src_neighbor[current_neighbor_index]]) == current_neighbor_index:
matching[current_neighbor_index] = src_neighbor[current_neighbor_index]
return matching
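# A quick sanity check (neighbor lists invented): source index 0 points to
# target 2 and trg[2] == 0, so they match; likewise 1 <-> 0.
# check_if_neighbors_match([2, 0], [1, 9, 0])  # -> {0: 2, 1: 0}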
|
c4d91ffca1f175e9964ca67c8b2200b1848b56d9
| 35,486 |
from torch import optim
def build_torch_optimizer_for_bert(model, opt):
"""
no_decay = ["bias", "LayerNorm.weight"]
encoder_params = [
{
"params": [p for n, p in model.encoder.named_parameters() if not any(nd in n for nd in no_decay)],
"weight_decay": 0.0,
},
{
"params": [p for n, p in model.encoder.named_parameters() if any(nd in n for nd in no_decay)],
"weight_decay": 0.0,
}
]
decoder_params = [
{
"params": [p for n, p in model.decoder.named_parameters() if not any(nd in n for nd in no_decay)],
"weight_decay": 0.0,
},
{
"params": [p for n, p in model.decoder.named_parameters() if any(nd in n for nd in no_decay)],
"weight_decay": 0.0,
}]
etc_params = [p for p in model.generator.parameters() if p.requires_grad] + [p for p in model.separator.parameters() if p.requires_grad]
"""
encoder_params = [p for p in model.encoder.parameters() if p.requires_grad]
# decoder_params = [p for n, p in model.decoder.named_parameters() if p.requires_grad and 'cross' not in n]
# etc_params = [p for n, p in model.decoder.named_parameters() if p.requires_grad and 'cross' in n] + [p for p in model.generator.parameters() if p.requires_grad] + [p for p in model.separator.parameters() if p.requires_grad]
decoder_params = [p for n, p in model.decoder.named_parameters() if p.requires_grad]
etc_params = [p for p in model.generator.parameters() if p.requires_grad] + [p for p in model.separator.parameters() if p.requires_grad]
betas = [opt.adam_beta1, opt.adam_beta2]
encoder_lr = opt.learning_rate if opt.enc_learning_rate == 0.0 else opt.enc_learning_rate
decoder_lr = opt.learning_rate if opt.dec_learning_rate == 0.0 else opt.dec_learning_rate
etc_lr = opt.learning_rate if opt.etc_learning_rate == 0.0 else opt.etc_learning_rate
if opt.optim == 'sgd':
if len(encoder_params) > 0 and len(decoder_params) > 0:
optimizer = {
"encoder": optim.SGD(encoder_params, lr=encoder_lr),
"decoder": optim.SGD(decoder_params, lr=decoder_lr),
"etc": optim.SGD(etc_params, lr=etc_lr)
}
elif len(decoder_params) > 0:
optimizer = {
"decoder": optim.SGD(decoder_params, lr=decoder_lr),
"etc": optim.SGD(etc_params, lr=etc_lr)
}
else:
optimizer = {
"etc": optim.SGD(etc_params, lr=etc_lr)
}
elif opt.optim == 'adagrad':
if len(encoder_params) > 0 and len(decoder_params) > 0:
optimizer = {
"encoder": optim.Adagrad(
encoder_params,
lr=encoder_lr,
initial_accumulator_value=opt.adagrad_accumlator_init),
"decoder": optim.Adagrad(
decoder_params,
lr=decoder_lr,
initial_accumulator_value=opt.adagrad_accumlator_init),
"etc": optim.Adagrad(
etc_params,
lr=etc_lr,
initial_accumulator_value=opt.adagrad_accumlator_init)
}
elif len(decoder_params) > 0:
optimizer = {
"decoder": optim.Adagrad(
decoder_params,
lr=decoder_lr,
initial_accumulator_value=opt.adagrad_accumlator_init),
"etc": optim.Adagrad(
etc_params,
lr=etc_lr,
initial_accumulator_value=opt.adagrad_accumlator_init)
}
else:
optimizer = {
"etc": optim.Adagrad(
etc_params,
lr=etc_lr,
initial_accumulator_value=opt.adagrad_accumlator_init)
}
elif opt.optim == 'adadelta':
        if len(encoder_params) > 0 and len(decoder_params) > 0:
optimizer = {
"encoder": optim.Adadelta(encoder_params, lr=encoder_lr),
"decoder": optim.Adadelta(decoder_params, lr=decoder_lr),
"etc": optim.Adadelta(etc_params, lr=etc_lr)
}
elif len(decoder_params) > 0:
optimizer = {
"decoder": optim.Adadelta(decoder_params, lr=decoder_lr),
"etc": optim.Adadelta(etc_params, lr=etc_lr)
}
else:
optimizer = {
"etc": optim.Adadelta(etc_params, lr=etc_lr)
}
elif opt.optim == 'adam':
if len(encoder_params) > 0 and len(decoder_params) > 0:
optimizer = {
"encoder": optim.Adam(encoder_params, lr=encoder_lr, betas=betas, eps=1e-12),
"decoder": optim.Adam(decoder_params, lr=decoder_lr, betas=betas, eps=1e-12),
"etc": optim.Adam(etc_params, lr=etc_lr, betas=betas, eps=1e-12)
}
elif len(decoder_params) > 0:
optimizer = {
"decoder": optim.Adam(decoder_params, lr=decoder_lr, betas=betas, eps=1e-12),
"etc": optim.Adam(etc_params, lr=etc_lr, betas=betas, eps=1e-12)
}
else:
optimizer = {
"etc": optim.Adam(etc_params, lr=etc_lr, betas=betas, eps=1e-12)
}
else:
raise ValueError("Invalid optimizer type: " + opt.optim)
return optimizer
|
20672e152874d51908855afcf46b26d0a4f5501e
| 35,487 |
def docker_compose(command):
"""
Run a docker-compose command
:param command: Command you want to run
"""
with env.cd(env.project_dir):
return env.run("docker-compose -f {file} {command}".format(file=env.compose_file, command=command))
|
9e51fdacb42057ab607ec00b563db4973ede7e96
| 35,488 |