Code | Summary |
---|---|
Please provide a description of the function:def token_perplexity_micro(eval_data, predictions, scores, learner='ignored'):
'''
Return the micro-averaged per-token perplexity `exp(-score / num_tokens)`
computed over the entire corpus, as a length-1 list of floats.
The log scores in `scores` should be base e (`exp`, `log`).
>>> refs = [Instance(None, ''),
... Instance(None, ''),
... Instance(None, '2')]
>>> scores = [np.log(1.0), np.log(0.25), np.log(1 / 64.)]
>>> perplexity = token_perplexity_micro(refs, None, scores)
>>> [round(p) for p in perplexity]
... # sequence perplexities: [1, 4, 64]
... # per-token perplexities: [1, 4, 8]
... # micro-average: gmean([1, 4, 8, 8])
[4.0]
'''
lens = np.array([len(_maybe_tokenize(inst.output)) + 1 for inst in eval_data])
return [np.exp(np.average(-np.array(scores) / lens, weights=lens))] | []
|
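The weighted average of `-score / len` in log space above is just a length-weighted geometric mean of the per-token perplexities. A standalone sketch with the doctest's numbers (NumPy only; `Instance` and `_maybe_tokenize` are not needed here):

```python
import numpy as np

# Doctest numbers: per-sequence token counts (len(tokens) + 1) and
# base-e log scores.
lens = np.array([1, 1, 2])
scores = np.array([np.log(1.0), np.log(0.25), np.log(1 / 64.)])

# Micro average exactly as token_perplexity_micro computes it.
micro = np.exp(np.average(-scores / lens, weights=lens))

# Equivalent: length-weighted geometric mean of per-token perplexities.
per_token = np.exp(-scores / lens)                     # [1., 4., 8.]
gmean = np.prod(per_token ** lens) ** (1 / lens.sum())

assert np.isclose(micro, gmean) and np.isclose(micro, 4.0)
```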
Please provide a description of the function:def aic(eval_data, predictions, scores, learner):
'''
Return Akaike information criterion (AIC) scores for the given
`learner` producing the given `scores` (log likelihoods in base e):
aic = 2 * learner.num_params - 2 * sum(log_2(exp(scores)))
The result is a list *one element longer* than the number of scores:
the last element of this list is the penalty for the learner from the
number of parameters, and the others are negative log likelihoods in
base 2.
The standard way to aggregate this metric is to sum the resulting list.
>>> learner = Learner(); learner.num_params = 1024
>>> aic(None, None, [np.log(0.5), np.log(0.125), np.log(0.25), np.log(0.5)], learner)
[2.0, 6.0, 4.0, 2.0, 2048.0]
'''
return (-2.0 * np.array(scores) / np.log(2.0)).tolist() + [2.0 * float(learner.num_params)] | []
|
Please provide a description of the function:def aic_averaged(eval_data, predictions, scores, learner):
'''
Return Akaike information criterion (AIC) scores for the given
`learner` producing the given `scores` (log likelihoods in base e):
aic = 2 * learner.num_params - 2 * sum(log_2(exp(scores)))
The result is a list of the same length as the number of scores.
The penalty from the number of parameters is divided by the number of
scores and added to the contribution of each score; thus, `aic` and
`aic_averaged` will have the same sum but yield different-length lists.
The standard way to aggregate this metric is to sum the resulting list.
>>> learner = Learner(); learner.num_params = 1024
>>> aic_averaged(None, None, [np.log(0.5), np.log(0.125), np.log(0.25), np.log(0.5)], learner)
[514.0, 518.0, 516.0, 514.0]
'''
scores = np.array(scores)
penalty = 2.0 * float(learner.num_params) / len(scores)
return (penalty - 2.0 * scores / np.log(2.0)).tolist() | []
|
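Because the fixed `2 * num_params` penalty is merely spread evenly over the scores, the two variants agree once summed; they differ only in list length. A quick check, assuming `aic` and `aic_averaged` above are in scope and using a stand-in `Learner` that only carries `num_params`:

```python
import numpy as np

class Learner:            # stand-in: only the num_params attribute is used
    num_params = 1024

scores = [np.log(0.5), np.log(0.125), np.log(0.25), np.log(0.5)]
learner = Learner()

per_item = aic(None, None, scores, learner)             # 5 elements
averaged = aic_averaged(None, None, scores, learner)    # 4 elements

assert np.isclose(sum(per_item), sum(averaged))         # both 2062.0
```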
Please provide a description of the function:def encrypt_variable(variable, build_repo, *, tld='.org', public_key=None,
travis_token=None, **login_kwargs):
if not isinstance(variable, bytes):
raise TypeError("variable should be bytes")
if b"=" not in variable:
raise ValueError("variable should be of the form 'VARIABLE=value'")
if not public_key:
_headers = {
'Content-Type': 'application/json',
'User-Agent': 'MyClient/1.0.0',
}
headersv2 = {**_headers, **Travis_APIv2}
headersv3 = {**_headers, **Travis_APIv3}
if travis_token:
headersv3['Authorization'] = 'token {}'.format(travis_token)
res = requests.get('https://api.travis-ci.com/repo/{build_repo}/key_pair/generated'.format(build_repo=urllib.parse.quote(build_repo,
safe='')), headers=headersv3)
if res.json().get('file') == 'not found':
raise RuntimeError("Could not find the Travis public key for %s" % build_repo)
public_key = res.json()['public_key']
else:
res = requests.get('https://api.travis-ci{tld}/repos/{build_repo}/key'.format(build_repo=build_repo,
tld=tld),
headers=headersv2)
public_key = res.json()['key']
if res.status_code == requests.codes.not_found:
raise RuntimeError('Could not find requested repo on Travis. Is Travis enabled?')
res.raise_for_status()
public_key = public_key.replace("RSA PUBLIC KEY", "PUBLIC KEY").encode('utf-8')
key = serialization.load_pem_public_key(public_key, backend=default_backend())
pad = padding.PKCS1v15()
return base64.b64encode(key.encrypt(variable, pad)) | [
"\n Encrypt an environment variable for ``build_repo`` for Travis\n\n ``variable`` should be a bytes object, of the form ``b'ENV=value'``.\n\n ``build_repo`` is the repo that ``doctr deploy`` will be run from. It\n should be like 'drdoctr/doctr'.\n\n ``tld`` should be ``'.org'`` for travis-ci.org and ``'.com'`` for\n travis-ci.com.\n\n ``public_key`` should be a pem format public key, obtained from Travis if\n not provided.\n\n If the repo is private, travis_token should be as returned by\n ``get_temporary_token(**login_kwargs)``. A token being present\n automatically implies ``tld='.com'``.\n\n "
]
|
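The Travis-side encryption is plain RSA/PKCS1v15 over the `VARIABLE=value` bytes, base64-encoded for `.travis.yml`. A minimal offline sketch of that step, generating a throwaway key pair instead of fetching Travis's real public key:

```python
from base64 import b64encode
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding, rsa

# Throwaway key pair so the example needs no network; in real use the
# PEM public key comes from the Travis API as in encrypt_variable above.
key = rsa.generate_private_key(public_exponent=65537, key_size=2048,
                               backend=default_backend())
pem = key.public_key().public_bytes(
    serialization.Encoding.PEM,
    serialization.PublicFormat.SubjectPublicKeyInfo)

public_key = serialization.load_pem_public_key(pem, backend=default_backend())
secure = b64encode(public_key.encrypt(b"MY_TOKEN=abc123", padding.PKCS1v15()))
print(secure.decode('ascii'))   # the value that goes into `secure: "..."`
```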
Please provide a description of the function:def encrypt_to_file(contents, filename):
if not filename.endswith('.enc'):
raise ValueError("%s does not end with .enc" % filename)
key = Fernet.generate_key()
fer = Fernet(key)
encrypted_file = fer.encrypt(contents)
with open(filename, 'wb') as f:
f.write(encrypted_file)
return key | [
"\n Encrypts ``contents`` and writes it to ``filename``.\n\n ``contents`` should be a bytes string. ``filename`` should end with\n ``.enc``.\n\n Returns the secret key used for the encryption.\n\n Decrypt the file with :func:`doctr.travis.decrypt_file`.\n\n "
]
|
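A round trip showing that the returned Fernet key decrypts what was written, assuming `encrypt_to_file` above is in scope:

```python
import os
import tempfile
from cryptography.fernet import Fernet

path = os.path.join(tempfile.mkdtemp(), 'payload.enc')
key = encrypt_to_file(b's3cret contents', path)

with open(path, 'rb') as f:
    assert Fernet(key).decrypt(f.read()) == b's3cret contents'
```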
Please provide a description of the function:def GitHub_login(*, username=None, password=None, OTP=None, headers=None):
if not username:
username = input("What is your GitHub username? ")
if not password:
password = getpass("Enter the GitHub password for {username}: ".format(username=username))
headers = headers or {}
if OTP:
headers['X-GitHub-OTP'] = OTP
auth = HTTPBasicAuth(username, password)
r = requests.get('https://api.github.com/', auth=auth, headers=headers)
if r.status_code == 401:
two_factor = r.headers.get('X-GitHub-OTP')
if two_factor:
if OTP:
print(red("Invalid authentication code"))
# For SMS, we have to make a fake request (that will fail without
# the OTP) to get GitHub to send it. See https://github.com/drdoctr/doctr/pull/203
auth_header = base64.urlsafe_b64encode(bytes(username + ':' + password, 'utf8')).decode()
login_kwargs = {'auth': None, 'headers': {'Authorization': 'Basic {}'.format(auth_header)}}
try:
generate_GitHub_token(**login_kwargs)
except (requests.exceptions.HTTPError, GitHubError):
pass
print("A two-factor authentication code is required:", two_factor.split(';')[1].strip())
OTP = input("Authentication code: ")
return GitHub_login(username=username, password=password, OTP=OTP, headers=headers)
raise AuthenticationFailed("invalid username or password")
GitHub_raise_for_status(r)
return {'auth': auth, 'headers': headers} | [
"\n Login to GitHub.\n\n If no username, password, or OTP (2-factor authentication code) are\n provided, they will be requested from the command line.\n\n Returns a dict of kwargs that can be passed to functions that require\n authenticated connections to GitHub.\n "
]
|
Please provide a description of the function:def GitHub_raise_for_status(r):
# This will happen if the doctr session has been running too long and the
# OTP code gathered from GitHub_login has expired.
# TODO: Refactor the code to re-request the OTP without exiting.
if r.status_code == 401 and r.headers.get('X-GitHub-OTP'):
raise GitHubError("The two-factor authentication code has expired. Please run doctr configure again.")
if r.status_code == 403 and r.headers.get('X-RateLimit-Remaining') == '0':
reset = int(r.headers['X-RateLimit-Reset'])
limit = int(r.headers['X-RateLimit-Limit'])
reset_datetime = datetime.datetime.fromtimestamp(reset, datetime.timezone.utc)
relative_reset_datetime = reset_datetime - datetime.datetime.now(datetime.timezone.utc)
# Based on datetime.timedelta.__str__
mm, ss = divmod(relative_reset_datetime.seconds, 60)
hh, mm = divmod(mm, 60)
def plural(n):
return n, abs(n) != 1 and "s" or ""
s = "%d minute%s" % plural(mm)
if hh:
s = "%d hour%s, " % plural(hh) + s
if relative_reset_datetime.days:
s = ("%d day%s, " % plural(relative_reset_datetime.days)) + s
authenticated = limit >= 100
message = .format(limit=limit, un="" if authenticated else "un", documentation_url=r.json()["documentation_url"])
if authenticated:
message +=
else:
message +=
message += .format(s=s)
raise GitHubError(message)
r.raise_for_status() | [
"\n Call instead of r.raise_for_status() for GitHub requests\n\n Checks for common GitHub response issues and prints messages for them.\n ",
"\\\nYour GitHub API rate limit has been hit. GitHub allows {limit} {un}authenticated\nrequests per hour. See {documentation_url}\nfor more information.\n",
"\nNote that GitHub's API limits are shared across all oauth applications. A\ncommon cause of hitting the rate limit is the Travis \"sync account\" button.\n",
"\nYou can get a higher API limit by authenticating. Try running doctr configure\nagain without the --no-upload-key flag.\n",
"\nYour rate limits will reset in {s}.\\\n"
]
|
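The nested `divmod` in the rate-limit branch builds a human-readable countdown. A standalone sketch of just that arithmetic (an assumption: same formatting, minus the request plumbing):

```python
import datetime

def human_delta(td):
    # Mirrors the divmod logic in GitHub_raise_for_status above.
    def plural(n):
        return n, "" if abs(n) == 1 else "s"
    mm, ss = divmod(td.seconds, 60)
    hh, mm = divmod(mm, 60)
    s = "%d minute%s" % plural(mm)
    if hh:
        s = "%d hour%s, " % plural(hh) + s
    if td.days:
        s = "%d day%s, " % plural(td.days) + s
    return s

print(human_delta(datetime.timedelta(days=1, hours=2, minutes=1)))
# -> 1 day, 2 hours, 1 minute
```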
Please provide a description of the function:def GitHub_post(data, url, *, auth, headers):
r = requests.post(url, auth=auth, headers=headers, data=json.dumps(data))
GitHub_raise_for_status(r)
return r.json() | [
"\n POST the data ``data`` to GitHub.\n\n Returns the json response from the server, or raises on error status.\n\n "
]
|
Please provide a description of the function:def get_travis_token(*, GitHub_token=None, **login_kwargs):
_headers = {
'Content-Type': 'application/json',
'User-Agent': 'MyClient/1.0.0',
}
headersv2 = {**_headers, **Travis_APIv2}
token_id = None
try:
if not GitHub_token:
print(green("I need to generate a temporary token with GitHub to authenticate with Travis. You may get a warning email from GitHub about this."))
print(green("It will be deleted immediately. If you still see it at https://github.com/settings/tokens after this, please delete it manually."))
# /auth/github doesn't seem to exist in the Travis API v3.
tok_dict = generate_GitHub_token(scopes=["read:org", "user:email", "repo"],
note="temporary token for doctr to auth against travis (delete me)",
**login_kwargs)
GitHub_token = tok_dict['token']
token_id = tok_dict['id']
data = {'github_token': GitHub_token}
res = requests.post('https://api.travis-ci.com/auth/github', data=json.dumps(data), headers=headersv2)
return res.json()['access_token']
finally:
if token_id:
delete_GitHub_token(token_id, **login_kwargs) | [
"\n Generate a temporary token for authenticating with Travis\n\n The GitHub token can be passed in to the ``GitHub_token`` keyword\n argument. If no token is passed in, a GitHub token is generated\n temporarily, and then immediately deleted.\n\n This is needed to activate a private repo\n\n Returns the secret token. It should be added to the headers like\n\n headers['Authorization'] = \"token {}\".format(token)\n\n "
]
|
Please provide a description of the function:def generate_GitHub_token(*, note="Doctr token for pushing to gh-pages from Travis", scopes=None, **login_kwargs):
if scopes is None:
scopes = ['public_repo']
AUTH_URL = "https://api.github.com/authorizations"
data = {
"scopes": scopes,
"note": note,
"note_url": "https://github.com/drdoctr/doctr",
"fingerprint": str(uuid.uuid4()),
}
return GitHub_post(data, AUTH_URL, **login_kwargs) | [
"\n Generate a GitHub token for pushing from Travis\n\n The scope requested is public_repo.\n\n If no password or OTP are provided, they will be requested from the\n command line.\n\n The token created here can be revoked at\n https://github.com/settings/tokens.\n "
]
|
Please provide a description of the function:def delete_GitHub_token(token_id, *, auth, headers):
r = requests.delete('https://api.github.com/authorizations/{id}'.format(id=token_id), auth=auth, headers=headers)
GitHub_raise_for_status(r) | [
"Delete a temporary GitHub token"
]
|
Please provide a description of the function:def upload_GitHub_deploy_key(deploy_repo, ssh_key, *, read_only=False,
title="Doctr deploy key for pushing to gh-pages from Travis", **login_kwargs):
DEPLOY_KEY_URL = "https://api.github.com/repos/{deploy_repo}/keys".format(deploy_repo=deploy_repo)
data = {
"title": title,
"key": ssh_key,
"read_only": read_only,
}
return GitHub_post(data, DEPLOY_KEY_URL, **login_kwargs) | [
"\n Uploads a GitHub deploy key to ``deploy_repo``.\n\n If ``read_only=True``, the deploy_key will not be able to write to the\n repo.\n "
]
|
Please provide a description of the function:def generate_ssh_key():
key = rsa.generate_private_key(
backend=default_backend(),
public_exponent=65537,
key_size=4096
)
private_key = key.private_bytes(
serialization.Encoding.PEM,
serialization.PrivateFormat.PKCS8,
serialization.NoEncryption())
public_key = key.public_key().public_bytes(
serialization.Encoding.OpenSSH,
serialization.PublicFormat.OpenSSH
)
return private_key, public_key | [
"\n Generates an SSH deploy public and private key.\n\n Returns (private key, public key), a tuple of byte strings.\n "
]
|
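What the two returned byte strings look like, assuming `generate_ssh_key` above is in scope:

```python
private_key, public_key = generate_ssh_key()

assert private_key.startswith(b'-----BEGIN PRIVATE KEY-----')   # PKCS8 PEM
assert public_key.startswith(b'ssh-rsa ')                       # OpenSSH line

# The public half is what upload_GitHub_deploy_key sends to GitHub; the
# private half is what encrypt_to_file writes out as the .enc key file.
```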
Please provide a description of the function:def check_repo_exists(deploy_repo, service='github', *, auth=None,
headers=None, ask=False):
headers = headers or {}
if deploy_repo.count("/") != 1:
raise RuntimeError('"{deploy_repo}" should be in the form username/repo'.format(deploy_repo=deploy_repo))
user, repo = deploy_repo.split('/')
if service == 'github':
REPO_URL = 'https://api.github.com/repos/{user}/{repo}'
elif service == 'travis' or service == 'travis-ci.com':
REPO_URL = 'https://api.travis-ci.com/repo/{user}%2F{repo}'
headers = {**headers, **Travis_APIv3}
elif service == 'travis-ci.org':
REPO_URL = 'https://api.travis-ci.org/repo/{user}%2F{repo}'
headers = {**headers, **Travis_APIv3}
else:
raise RuntimeError('Invalid service specified for repo check (should be one of {"github", "travis", "travis-ci.com", "travis-ci.org"})')
wiki = False
if repo.endswith('.wiki') and service == 'github':
wiki = True
repo = repo[:-5]
def _try(url):
r = requests.get(url, auth=auth, headers=headers)
if r.status_code in [requests.codes.not_found, requests.codes.forbidden]:
return False
if service == 'github':
GitHub_raise_for_status(r)
else:
r.raise_for_status()
return r
r = _try(REPO_URL.format(user=urllib.parse.quote(user),
repo=urllib.parse.quote(repo)))
r_active = r and (service == 'github' or r.json().get('active', False))
if service == 'travis':
REPO_URL = 'https://api.travis-ci.org/repo/{user}%2F{repo}'
r_org = _try(REPO_URL.format(user=urllib.parse.quote(user),
repo=urllib.parse.quote(repo)))
r_org_active = r_org and r_org.json().get('active', False)
if not r_active:
if not r_org_active:
raise RuntimeError('"{user}/{repo}" not found on travis-ci.org or travis-ci.com'.format(user=user, repo=repo))
r = r_org
r_active = r_org_active
service = 'travis-ci.org'
else:
if r_active and r_org_active:
if ask:
while True:
print(green("{user}/{repo} appears to exist on both travis-ci.org and travis-ci.com.".format(user=user, repo=repo)))
preferred = input("Which do you want to use? [{default}/travis-ci.org] ".format(default=blue("travis-ci.com")))
preferred = preferred.lower().strip()
if preferred in ['o', 'org', '.org', 'travis-ci.org']:
r = r_org
service = 'travis-ci.org'
break
elif preferred in ['c', 'com', '.com', 'travis-ci.com', '']:
service = 'travis-ci.com'
break
else:
print(red("Please type 'travis-ci.com' or 'travis-ci.org'."))
else:
service = 'travis-ci.com'
else:
# .com but not .org.
service = 'travis-ci.com'
if not r_active:
msg = '' if auth else '. If the repo is private, then you need to authenticate.'
raise RuntimeError('"{user}/{repo}" not found on {service}{msg}'.format(user=user,
repo=repo,
service=service,
msg=msg))
private = r.json().get('private', False)
if wiki and not private:
# private wiki needs authentication, so skip check for existence
p = subprocess.run(['git', 'ls-remote', '-h', 'https://github.com/{user}/{repo}.wiki'.format(
user=user, repo=repo)], stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
if p.stderr or p.returncode:
raise RuntimeError('Wiki not found. Please create a wiki')
return {
'private': private,
'service': service,
} | [
"\n Checks that the repository exists on GitHub.\n\n This should be done before attempting to generate a key to deploy to that\n repo.\n\n Raises ``RuntimeError`` if the repo is not valid.\n\n Returns a dictionary with the following keys:\n\n - 'private': Indicates whether or not the repo requires authorization to\n access. Private repos require authorization.\n - 'service': For service='travis', is 'travis-ci.com' or 'travis-ci.org',\n depending on which should be used. Otherwise it is just equal to ``service``.\n\n For service='travis', if ask=True, it will ask at the command line if both\n travis-ci.org and travis-ci.com exist. If ask=False, service='travis' will\n check travis-ci.com first and only check travis-ci.org if it doesn't\n exist. ask=True does nothing for service='github',\n service='travis-ci.com', service='travis-ci.org'.\n "
]
|
Please provide a description of the function:def guess_github_repo():
p = subprocess.run(['git', 'ls-remote', '--get-url', 'origin'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
if p.stderr or p.returncode:
return False
url = p.stdout.decode('utf-8').strip()
m = GIT_URL.fullmatch(url)
if not m:
return False
return m.group(1) | [
"\n Guesses the github repo for the current directory\n\n Returns False if no guess can be made.\n "
]
|
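`GIT_URL` is a module-level regex that is not shown here. A plausible stand-in (an assumption, not necessarily the module's exact pattern) whose first group captures the `user/repo` slug from the common GitHub remote spellings:

```python
import re

# Hypothetical stand-in for the module-level GIT_URL pattern.
GIT_URL = re.compile(
    r'(?:git@|git://|https?://(?:www\.)?)github\.com[:/](.*?)(?:\.git)?/?$')

for url in ('https://github.com/drdoctr/doctr.git',
            'git@github.com:drdoctr/doctr.git',
            'git://github.com/drdoctr/doctr'):
    assert GIT_URL.fullmatch(url).group(1) == 'drdoctr/doctr'
```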
Please provide a description of the function:def make_parser_with_config_adder(parser, config):
def internal(arg, **kwargs):
invert = {
'store_true':'store_false',
'store_false':'store_true',
}
if arg.startswith('--no-'):
key = arg[5:]
else:
key = arg[2:]
if 'default' in kwargs:
if key in config:
kwargs['default'] = config[key]
del config[key]
action = kwargs.get('action')
if action in invert:
exclusive_grp = parser.add_mutually_exclusive_group()
exclusive_grp.add_argument(arg, **kwargs)
kwargs['action'] = invert[action]
kwargs['help'] = 'Inverse of "%s"' % arg
if arg.startswith('--no-'):
arg = '--%s' % arg[5:]
else:
arg = '--no-%s' % arg[2:]
exclusive_grp.add_argument(arg, **kwargs)
else:
parser.add_argument(arg, **kwargs)
return internal | [
"factory function for a smarter parser:\n\n return a utility function that pulls defaults from the config as well.\n\n Pull the default for the parser not only from the ``default`` kwarg,\n but also from an identical value found in ``config`` with the leading\n ``--`` or ``--no-`` removed.\n\n If the option is a boolean flag, automatically register an opposite,\n exclusive option by prepending or removing the ``--no-``. This is useful\n to overwrite config in ``.travis.yml``.\n\n Mutate the config object and remove known keys in order to detect unused\n options afterward.\n "
]
|
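Usage sketch, assuming `make_parser_with_config_adder` above is in scope: a config value overrides the hard-coded default, the inverse flag is registered automatically, and consumed keys are removed from the config:

```python
import argparse

parser = argparse.ArgumentParser()
config = {'sync': False}        # e.g. from the doctr key of .travis.yml

add = make_parser_with_config_adder(parser, config)
add('--no-sync', dest='sync', action='store_false', default=True,
    help="Don't sync files")

assert parser.parse_args([]).sync is False         # config beat default=True
assert parser.parse_args(['--sync']).sync is True  # auto-generated inverse
assert config == {}                                # 'sync' was consumed
```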
Please provide a description of the function:def get_parser(config=None):
# This uses RawTextHelpFormatter so that the description (the docstring of
# this module) is formatted correctly. Unfortunately, that means that
# parser help is not text wrapped (but all other help is).
parser = argparse.ArgumentParser(description=__doc__,
formatter_class=argparse.RawTextHelpFormatter, epilog=,
)
if not config:
config={}
parser.add_argument('-V', '--version', action='version', version='doctr ' + __version__)
subcommand = parser.add_subparsers(title='subcommand', dest='subcommand')
deploy_parser = subcommand.add_parser('deploy', help=)
deploy_parser.set_defaults(func=deploy)
deploy_parser_add_argument = make_parser_with_config_adder(deploy_parser, config)
deploy_parser_add_argument('--force', action='store_true', help=)
deploy_parser_add_argument('deploy_directory', type=str, nargs='?',
help=)
deploy_parser_add_argument('--token', action='store_true', default=False,
help=)
deploy_parser_add_argument('--key-path', default=None,
help=)
deploy_parser_add_argument('--built-docs', default=None,
help=)
deploy_parser.add_argument('--deploy-branch-name', default=None,
help=)
deploy_parser_add_argument('--tmp-dir', default=None,
help=argparse.SUPPRESS)
deploy_parser_add_argument('--deploy-repo', default=None, help=)
deploy_parser_add_argument('--branch-whitelist', default=None, nargs='*',
help=, metavar="BRANCH")
deploy_parser_add_argument('--no-require-master', dest='require_master', action='store_false',
default=True, help=)
deploy_parser_add_argument('--command', default=None,
help=)
deploy_parser_add_argument('--no-sync', dest='sync', action='store_false',
default=True, help=)
deploy_parser.add_argument('--no-temp-dir', dest='temp_dir',
action='store_false', default=True, help=)
deploy_parser_add_argument('--no-push', dest='push', action='store_false',
default=True, help="Run all the steps except the last push step. "
"Useful for debugging")
deploy_parser_add_argument('--build-tags', action='store_true',
default=False, help=)
deploy_parser_add_argument('--gh-pages-docs', default=None,
help=)
deploy_parser_add_argument('--exclude', nargs='+', default=(), help=)
if config:
print('Warning: the following options in `.travis.yml` were not recognized:\n%s' % json.dumps(config, indent=2))
configure_parser = subcommand.add_parser('configure', help="Configure doctr. This command should be run locally (not on Travis).")
configure_parser.set_defaults(func=configure)
configure_parser.add_argument('--force', action='store_true', help=)
configure_parser.add_argument('--token', action="store_true", default=False,
help=)
configure_parser.add_argument("--no-upload-key", action="store_false", default=True,
dest="upload_key", help=)
configure_parser.add_argument("--no-authenticate", action="store_false",
default=True, dest="authenticate", help=)
configure_parser.add_argument('--key-path', default=None,
help=)
configure_parser.add_argument('--travis-tld', default=None,
help=, choices=['c', 'com', '.com', 'travis-ci.com', 'o', 'org', '.org',
'travis-ci.org'])
return parser | [
"\n return a parser suitable to parse CL arguments.\n\n Parameters\n ----------\n\n config: dict\n Default values to fall back on, if not given.\n\n Returns\n -------\n\n An argparse parser configured to parse the command line arguments of\n sys.argv which will default to values present in ``config``.\n ",
"\nRun --help on the subcommands like 'doctr deploy --help' to see the\noptions available.\n ",
"Deploy the docs to GitHub from Travis.",
"Run the deploy command even\n if we do not appear to be on Travis.",
"Directory to deploy the html documentation to on gh-pages.",
"Push to GitHub using a personal access token. Use this if you\n used 'doctr configure --token'.",
"Path of the encrypted GitHub deploy key. The default is github_deploy_key_+\n deploy repository name + .enc.",
"Location of the built html documentation to be deployed to gh-pages. If not\n specified, Doctr will try to automatically detect build location\n (right now only works for Sphinx docs).",
"Name of the branch to deploy to (default: 'master' for ``*.github.io``\n and wiki repos, 'gh-pages' otherwise)",
"Repo to\n deploy the docs to. By default, it deploys to the repo Doctr is run from.",
"Branches to deploy from. Pass no arguments to not build on any branch\n (typically used in conjunction with --build-tags). Note that you can\n deploy from every branch with --no-require-master.",
"Allow docs to be pushed from a branch other than master",
"Command to be run before committing and pushing. This command\n will be run from the deploy repository/branch. If the command creates\n additional files that should be deployed, they should be added to the\n index.",
"Don't sync any files. This is generally used in\n conjunction with the --command flag, for instance, if the command syncs\n the files for you. Any files you wish to commit should be added to the\n index.",
"Don't copy the\n --built-docs directory to a temporary directory.",
"Deploy on tag builds. On a tag build,\n $TRAVIS_TAG is set to the name of the tag. The default is to not\n deploy on tag builds. Note that this will still build on a branch,\n unless --branch-whitelist (with no arguments) is passed.",
"!!DEPRECATED!! Directory to deploy the html documentation to on gh-pages.\n The default is %(default)r. The deploy directory should be passed as\n the first argument to 'doctr deploy'. This flag is kept for backwards\n compatibility.",
"Files and\n directories from --built-docs that are not copied.",
"Run the configure command even\n if we appear to be on Travis.",
"Generate a personal access token to push to GitHub. The default is to use a\n deploy key. WARNING: This will grant read/write access to all the\n public repositories for the user. This option is not recommended\n unless you are using a separate GitHub user for deploying.",
"Don't automatically upload the deploy key to GitHub. To prevent doctr\n configure from asking for your GitHub credentials, use\n --no-authenticate.",
"Don't authenticate with GitHub. This option implies --no-upload-key. This\n option is also not compatible with private repositories.",
"Path to save the encrypted GitHub deploy key. The default is github_deploy_key_+\n deploy repository name. The .enc extension is added to the file automatically.",
"Travis tld to use. Should be either '.com' or '.org'. The default is to\n check which the repo is activated on and ask if it is activated on\n both."
]
|
Please provide a description of the function:def get_config():
p = Path('.travis.yml')
if not p.exists():
return {}
with p.open() as f:
travis_config = yaml.safe_load(f.read())
config = travis_config.get('doctr', {})
if not isinstance(config, dict):
raise ValueError('config is not a dict: {}'.format(config))
return config | [
"\n Load the ``doctr`` configuration from ``.travis.yml``, if the file is\n present and contains a ``doctr`` key.\n "
]
|
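What that extraction looks like on a typical `.travis.yml`, a standalone sketch using PyYAML directly:

```python
import yaml

travis_yml = """
language: python
doctr:
  key-path: my_deploy_key
  sync: false
"""

config = yaml.safe_load(travis_yml).get('doctr', {})
assert config == {'key-path': 'my_deploy_key', 'sync': False}
```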
Please provide a description of the function:def get_deploy_key_repo(deploy_repo, keypath, key_ext=''):
# deploy key of the original repo has write access to the wiki
deploy_key_repo = deploy_repo[:-5] if deploy_repo.endswith('.wiki') else deploy_repo
# Automatically determine environment variable and key file name from deploy repo name
# Special characters are substituted with a hyphen(-) by GitHub
snake_case_name = deploy_key_repo.replace('-', '_').replace('.', '_').replace('/', '_').lower()
env_name = 'DOCTR_DEPLOY_ENCRYPTION_KEY_' + snake_case_name.upper()
keypath = keypath or 'github_deploy_key_' + snake_case_name + key_ext
return (deploy_key_repo, env_name, keypath) | [
"\n Return (repository of which deploy key is used, environment variable to store\n the encryption key of deploy key, path of deploy key file)\n "
]
|
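The name mangling in one concrete case, assuming `get_deploy_key_repo` above is in scope (note how a `.wiki` target reuses the parent repo's key):

```python
assert get_deploy_key_repo('drdoctr/doctr.wiki', None, key_ext='.enc') == (
    'drdoctr/doctr',                                # wiki uses the repo's key
    'DOCTR_DEPLOY_ENCRYPTION_KEY_DRDOCTR_DOCTR',    # env var for the secret
    'github_deploy_key_drdoctr_doctr.enc',          # default key file name
)
```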
Please provide a description of the function:def configure(args, parser):
if not args.force and on_travis():
parser.error(red("doctr appears to be running on Travis. Use "
"doctr configure --force to run anyway."))
if not args.authenticate:
args.upload_key = False
if args.travis_tld:
if args.travis_tld in ['c', 'com', '.com', 'travis-ci.com']:
args.travis_tld = 'travis-ci.com'
else:
args.travis_tld = 'travis-ci.org'
print(green(dedent()))
login_kwargs = {}
if args.authenticate:
while not login_kwargs:
try:
login_kwargs = GitHub_login()
except AuthenticationFailed as e:
print(red(e))
else:
login_kwargs = {'auth': None, 'headers': None}
GitHub_token = None
get_build_repo = False
default_repo = guess_github_repo()
while not get_build_repo:
try:
if default_repo:
build_repo = input("What repo do you want to build the docs for? [{default_repo}] ".format(default_repo=blue(default_repo)))
if not build_repo:
build_repo = default_repo
else:
build_repo = input("What repo do you want to build the docs for (org/reponame, like 'drdoctr/doctr')? ")
is_private = check_repo_exists(build_repo, service='github',
**login_kwargs)['private']
if is_private and not args.authenticate:
sys.exit(red("--no-authenticate is not supported for private repositories."))
headers = {}
travis_token = None
if is_private:
if args.token:
GitHub_token = generate_GitHub_token(note="Doctr token for pushing to gh-pages from Travis (for {build_repo}).".format(build_repo=build_repo),
scopes=["read:org", "user:email", "repo"], **login_kwargs)['token']
travis_token = get_travis_token(GitHub_token=GitHub_token, **login_kwargs)
headers['Authorization'] = "token {}".format(travis_token)
service = args.travis_tld if args.travis_tld else 'travis'
c = check_repo_exists(build_repo, service=service, ask=True, headers=headers)
tld = c['service'][-4:]
is_private = c['private'] or is_private
if is_private and not args.authenticate:
sys.exit(red("--no-authenticate is not supported for private repos."))
get_build_repo = True
except GitHubError:
raise
except RuntimeError as e:
print(red('\n{!s:-^{}}\n'.format(e, 70)))
get_deploy_repo = False
while not get_deploy_repo:
try:
deploy_repo = input("What repo do you want to deploy the docs to? [{build_repo}] ".format(build_repo=blue(build_repo)))
if not deploy_repo:
deploy_repo = build_repo
if deploy_repo != build_repo:
check_repo_exists(deploy_repo, service='github', **login_kwargs)
get_deploy_repo = True
except GitHubError:
raise
except RuntimeError as e:
print(red('\n{!s:-^{}}\n'.format(e, 70)))
N = IncrementingInt(1)
header = green("\n================== You should now do the following ==================\n")
if args.token:
if not GitHub_token:
GitHub_token = generate_GitHub_token(**login_kwargs)['token']
encrypted_variable = encrypt_variable("GH_TOKEN={GitHub_token}".format(GitHub_token=GitHub_token).encode('utf-8'),
build_repo=build_repo, tld=tld, travis_token=travis_token, **login_kwargs)
print(dedent())
print(header)
else:
deploy_key_repo, env_name, keypath = get_deploy_key_repo(deploy_repo, args.key_path)
private_ssh_key, public_ssh_key = generate_ssh_key()
key = encrypt_to_file(private_ssh_key, keypath + '.enc')
del private_ssh_key # Prevent accidental use below
public_ssh_key = public_ssh_key.decode('ASCII')
encrypted_variable = encrypt_variable(env_name.encode('utf-8') + b"=" + key,
build_repo=build_repo, tld=tld, travis_token=travis_token, **login_kwargs)
deploy_keys_url = 'https://github.com/{deploy_repo}/settings/keys'.format(deploy_repo=deploy_key_repo)
if args.upload_key:
upload_GitHub_deploy_key(deploy_key_repo, public_ssh_key, **login_kwargs)
print(dedent(.format(deploy_repo=deploy_key_repo, deploy_keys_url=deploy_keys_url, keypath=keypath)))
print(header)
else:
print(header)
print(dedent(.format(ssh_key=public_ssh_key, deploy_keys_url=deploy_keys_url, N=N,
BOLD_MAGENTA=BOLD_MAGENTA, RESET=RESET)))
print(dedent(.format(keypath=keypath, N=N, BOLD_MAGENTA=BOLD_MAGENTA, RESET=RESET)))
options = '--built-docs ' + bold_black('<path/to/built/html/>')
if args.key_path:
options += ' --key-path {keypath}.enc'.format(keypath=keypath)
if deploy_repo != build_repo:
options += ' --deploy-repo {deploy_repo}'.format(deploy_repo=deploy_repo)
key_type = "deploy key"
if args.token:
options += ' --token'
key_type = "personal access token"
print(dedent(.format(options=options, N=N, key_type=key_type,
encrypted_variable=encrypted_variable.decode('utf-8'),
deploy_repo=deploy_repo, BOLD_MAGENTA=BOLD_MAGENTA,
BOLD_BLACK=BOLD_BLACK, RESET=RESET)))
print(dedent(.format(BOLD_BLACK=BOLD_BLACK, RESET=RESET)))
print(dedent())
print(dedent(.format(N=N, BOLD_MAGENTA=BOLD_MAGENTA, RESET=RESET)))
print("See the documentation at https://drdoctr.github.io/ for more information.") | [
"\n Color guide\n\n - red: Error and warning messages\n - green: Welcome messages (use sparingly)\n - blue: Default values\n - bold_magenta: Action items\n - bold_black: Parts of code to be run or copied that should be modified\n ",
"\\\n Welcome to Doctr.\n\n We need to ask you a few questions to get you on your way to automatically\n deploying from Travis CI to GitHub pages.\n ",
"\n A personal access token for doctr has been created.\n\n You can go to https://github.com/settings/tokens to revoke it.",
"\n The deploy key has been added for {deploy_repo}.\n\n You can go to {deploy_keys_url} to revoke the deploy key.\\\n ",
"\\\n {N}. {BOLD_MAGENTA}Go to {deploy_keys_url}\n and add the following as a new key:{RESET}\n\n {ssh_key}\n {BOLD_MAGENTA}Be sure to allow write access for the key.{RESET}\n ",
"\\\n {N}. {BOLD_MAGENTA}Add the file {keypath}.enc to be staged for commit:{RESET}\n\n git add {keypath}.enc\n ",
"\\\n {N}. {BOLD_MAGENTA}Add these lines to your `.travis.yml` file:{RESET}\n\n env:\n global:\n # Doctr {key_type} for {deploy_repo}\n - secure: \"{encrypted_variable}\"\n\n script:\n - set -e\n - {BOLD_BLACK}<Command to build your docs>{RESET}\n - pip install doctr\n - doctr deploy {options} {BOLD_BLACK}<target-directory>{RESET}\n ",
"\\\n Replace the text in {BOLD_BLACK}<angle brackets>{RESET} with the relevant\n things for your repository.\n ",
"\\\n Note: the `set -e` prevents doctr from running when the docs build fails.\n We put this code under `script:` so that if doctr fails it causes the\n build to fail.\n ",
"\\\n {N}. {BOLD_MAGENTA}Commit and push these changes to your GitHub repository.{RESET}\n The docs should now build automatically on Travis.\n "
]
|
Please provide a description of the function:def decrypt_file(file, key):
if not file.endswith('.enc'):
raise ValueError("%s does not end with .enc" % file)
fer = Fernet(key)
with open(file, 'rb') as f:
decrypted_file = fer.decrypt(f.read())
with open(file[:-4], 'wb') as f:
f.write(decrypted_file)
os.chmod(file[:-4], 0o600) | [
"\n Decrypts the file ``file``.\n\n The encrypted file is assumed to end with the ``.enc`` extension. The\n decrypted file is saved to the same location without the ``.enc``\n extension.\n\n The permissions on the decrypted file are automatically set to 0o600.\n\n See also :func:`doctr.local.encrypt_file`.\n\n "
]
|
Please provide a description of the function:def setup_deploy_key(keypath='github_deploy_key', key_ext='.enc', env_name='DOCTR_DEPLOY_ENCRYPTION_KEY'):
key = os.environ.get(env_name, os.environ.get("DOCTR_DEPLOY_ENCRYPTION_KEY", None))
if not key:
raise RuntimeError("{env_name} or DOCTR_DEPLOY_ENCRYPTION_KEY environment variable is not set. Make sure you followed the instructions from 'doctr configure' properly. You may need to re-run 'doctr configure' to fix this error."
.format(env_name=env_name))
# Legacy keyfile name
if (not os.path.isfile(keypath + key_ext) and
os.path.isfile('github_deploy_key' + key_ext)):
keypath = 'github_deploy_key'
key_filename = os.path.basename(keypath)
key = key.encode('utf-8')
decrypt_file(keypath + key_ext, key)
key_path = os.path.expanduser("~/.ssh/" + key_filename)
os.makedirs(os.path.expanduser("~/.ssh"), exist_ok=True)
os.rename(keypath, key_path)
with open(os.path.expanduser("~/.ssh/config"), 'a') as f:
f.write("Host github.com"
' IdentityFile "%s"'
" LogLevel ERROR\n" % key_path)
# start ssh-agent and add key to it
# info from SSH agent has to be put into the environment
agent_info = subprocess.check_output(['ssh-agent', '-s'])
agent_info = agent_info.decode('utf-8')
agent_info = agent_info.split()
AUTH_SOCK = agent_info[0].split('=')[1][:-1]
AGENT_PID = agent_info[3].split('=')[1][:-1]
os.putenv('SSH_AUTH_SOCK', AUTH_SOCK)
os.putenv('SSH_AGENT_PID', AGENT_PID)
run(['ssh-add', os.path.expanduser('~/.ssh/' + key_filename)]) | [
"\n Decrypts the deploy key and configures it with ssh\n\n The key is assumed to be encrypted as keypath + key_ext, and the\n encryption key is assumed to be set in the environment variable\n ``env_name``. If ``env_name`` is not set, it falls back to\n ``DOCTR_DEPLOY_ENCRYPTION_KEY`` for backwards compatibility.\n\n If keypath + key_ext does not exist, it falls back to\n ``github_deploy_key.enc`` for backwards compatibility.\n "
]
|
Please provide a description of the function:def get_token():
token = os.environ.get("GH_TOKEN", None)
if not token:
token = "GH_TOKEN environment variable not set"
token = token.encode('utf-8')
return token | [
"\n Get the encrypted GitHub token in Travis.\n\n Make sure the contents of this variable do not leak. The ``run()`` function\n will remove this from the output, so always use it.\n "
]
|
Please provide a description of the function:def run(args, shell=False, exit=True):
if "GH_TOKEN" in os.environ:
token = get_token()
else:
token = b''
if not shell:
command = ' '.join(map(shlex.quote, args))
else:
command = args
command = command.replace(token.decode('utf-8'), '~'*len(token))
print(blue(command))
sys.stdout.flush()
returncode = run_command_hiding_token(args, token, shell=shell)
if exit and returncode != 0:
sys.exit(red("%s failed: %s" % (command, returncode)))
return returncode | [
"\n Run the command ``args``.\n\n Automatically hides the secret GitHub token from the output.\n\n If shell=False (recommended for most commands), args should be a list of\n strings. If shell=True, args should be a string of the command to run.\n\n If exit=True, it exits on nonzero returncode. Otherwise it returns the\n returncode.\n "
]
|
Please provide a description of the function:def get_current_repo():
remote_url = subprocess.check_output(['git', 'config', '--get',
'remote.origin.url']).decode('utf-8')
# Travis uses the https clone url
_, org, git_repo = remote_url.rsplit('.git', 1)[0].rsplit('/', 2)
return (org + '/' + git_repo) | [
"\n Get the GitHub repo name for the current directory.\n\n Assumes that the repo is in the ``origin`` remote.\n "
]
|
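The `rsplit` dance on a typical Travis https clone URL, standalone:

```python
remote_url = 'https://github.com/drdoctr/doctr.git'
_, org, git_repo = remote_url.rsplit('.git', 1)[0].rsplit('/', 2)
assert org + '/' + git_repo == 'drdoctr/doctr'
```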
Please provide a description of the function:def get_travis_branch():
if os.environ.get("TRAVIS_PULL_REQUEST", "") == "true":
return os.environ.get("TRAVIS_PULL_REQUEST_BRANCH", "")
else:
return os.environ.get("TRAVIS_BRANCH", "") | [
"Get the name of the branch that the PR is from.\n\n Note that this is not simply ``$TRAVIS_BRANCH``. The ``push`` build will\n use the correct branch (the branch that the PR is from) but the ``pr``\n build will use the _target_ of the PR (usually master). So instead, we ask\n for ``$TRAVIS_PULL_REQUEST_BRANCH`` if it's a PR build, and\n ``$TRAVIS_BRANCH`` if it's a push build.\n "
]
|
Please provide a description of the function:def setup_GitHub_push(deploy_repo, *, auth_type='deploy_key',
full_key_path='github_deploy_key.enc', require_master=None,
branch_whitelist=None, deploy_branch='gh-pages',
env_name='DOCTR_DEPLOY_ENCRYPTION_KEY', build_tags=False):
# Set to the name of the tag for tag builds
TRAVIS_TAG = os.environ.get("TRAVIS_TAG", "")
if branch_whitelist is None:
branch_whitelist={'master'}
if require_master is not None:
import warnings
warnings.warn("`setup_GitHub_push`'s `require_master` argument is deprecated in favor of `branch_whitelist=['master']`",
DeprecationWarning,
stacklevel=2)
branch_whitelist.add('master')
if auth_type not in ['deploy_key', 'token']:
raise ValueError("auth_type must be 'deploy_key' or 'token'")
TRAVIS_BRANCH = os.environ.get("TRAVIS_BRANCH", "")
TRAVIS_PULL_REQUEST = os.environ.get("TRAVIS_PULL_REQUEST", "")
# Check if the repo is a fork
TRAVIS_REPO_SLUG = os.environ["TRAVIS_REPO_SLUG"]
REPO_URL = 'https://api.github.com/repos/{slug}'
r = requests.get(REPO_URL.format(slug=TRAVIS_REPO_SLUG))
fork = r.json().get('fork', False)
canpush = determine_push_rights(
branch_whitelist=branch_whitelist,
TRAVIS_BRANCH=TRAVIS_BRANCH,
TRAVIS_PULL_REQUEST=TRAVIS_PULL_REQUEST,
fork=fork,
TRAVIS_TAG=TRAVIS_TAG,
build_tags=build_tags)
print("Setting git attributes")
set_git_user_email()
remotes = subprocess.check_output(['git', 'remote']).decode('utf-8').split('\n')
if 'doctr_remote' in remotes:
print("doctr_remote already exists, removing")
run(['git', 'remote', 'remove', 'doctr_remote'])
print("Adding doctr remote")
if canpush:
if auth_type == 'token':
token = get_token()
run(['git', 'remote', 'add', 'doctr_remote',
'https://{token}@github.com/{deploy_repo}.git'.format(token=token.decode('utf-8'),
deploy_repo=deploy_repo)])
else:
keypath, key_ext = full_key_path.rsplit('.', 1)
key_ext = '.' + key_ext
try:
setup_deploy_key(keypath=keypath, key_ext=key_ext, env_name=env_name)
except RuntimeError:
# Rate limits prevent this check from working every time. By default, we
# assume it isn't a fork so that things just work on non-fork builds.
if r.status_code == 403:
print(yellow("Warning: GitHub's API rate limits prevented doctr from detecting if this build is a forked repo. If it is, you may ignore the 'DOCTR_DEPLOY_ENCRYPTION_KEY environment variable is not set' error that follows. If it is not, you should re-run 'doctr configure'. Note that doctr cannot deploy from fork builds due to limitations in Travis."), file=sys.stderr)
raise
run(['git', 'remote', 'add', 'doctr_remote',
'[email protected]:{deploy_repo}.git'.format(deploy_repo=deploy_repo)])
else:
print('setting a read-only GitHub doctr_remote')
run(['git', 'remote', 'add', 'doctr_remote',
'https://github.com/{deploy_repo}.git'.format(deploy_repo=deploy_repo)])
print("Fetching doctr remote")
run(['git', 'fetch', 'doctr_remote'])
return canpush | [
"\n Setup the remote to push to GitHub (to be run on Travis).\n\n ``auth_type`` should be either ``'deploy_key'`` or ``'token'``.\n\n For ``auth_type='token'``, this sets up the remote with the token and\n checks out the gh-pages branch. The token to push to GitHub is assumed to be in the ``GH_TOKEN`` environment\n variable.\n\n For ``auth_type='deploy_key'``, this sets up the remote with ssh access.\n "
]
|
Please provide a description of the function:def set_git_user_email():
username = subprocess.run(shlex.split('git config user.name'), stdout=subprocess.PIPE).stdout.strip().decode('utf-8')
if not username or username == "Travis CI User":
run(['git', 'config', '--global', 'user.name', "Doctr (Travis CI)"])
else:
print("Not setting git user name, as it's already set to %r" % username)
email = subprocess.run(shlex.split('git config user.email'), stdout=subprocess.PIPE).stdout.strip().decode('utf-8')
if not email or email == "[email protected]":
# We need a dummy email or git will fail. We use this one as per
# https://help.github.com/articles/keeping-your-email-address-private/.
run(['git', 'config', '--global', 'user.email', '[email protected]'])
else:
print("Not setting git user email, as it's already set to %r" % email) | [
"\n Set global user and email for git user if not already present on system\n "
]
|
Please provide a description of the function:def checkout_deploy_branch(deploy_branch, canpush=True):
# Create an empty branch with .nojekyll if it doesn't already exist
create_deploy_branch(deploy_branch, push=canpush)
remote_branch = "doctr_remote/{}".format(deploy_branch)
print("Checking out doctr working branch tracking", remote_branch)
clear_working_branch()
# If gh-pages doesn't exist, the above create_deploy_branch() will create
# it if we can push; but if we can't, it won't, and the --track would fail.
if run(['git', 'rev-parse', '--verify', remote_branch], exit=False) == 0:
extra_args = ['--track', remote_branch]
else:
extra_args = []
run(['git', 'checkout', '-b', DOCTR_WORKING_BRANCH] + extra_args)
print("Done")
return canpush | [
"\n Checkout the deploy branch, creating it if it doesn't exist.\n "
]
|
Please provide a description of the function:def deploy_branch_exists(deploy_branch):
remote_name = 'doctr_remote'
branch_names = subprocess.check_output(['git', 'branch', '-r']).decode('utf-8').split()
return '{}/{}'.format(remote_name, deploy_branch) in branch_names | [
"\n Check if there is a remote branch with name specified in ``deploy_branch``.\n\n Note that default ``deploy_branch`` is ``gh-pages`` for regular repos and\n ``master`` for ``github.io`` repos.\n\n This isn't completely robust. If there are multiple remotes and you have a\n ``deploy_branch`` branch on the non-default remote, this won't see it.\n "
]
|
Please provide a description of the function:def create_deploy_branch(deploy_branch, push=True):
if not deploy_branch_exists(deploy_branch):
print("Creating {} branch on doctr_remote".format(deploy_branch))
clear_working_branch()
run(['git', 'checkout', '--orphan', DOCTR_WORKING_BRANCH])
# delete everything in the new ref. this is non-destructive to existing
# refs/branches, etc...
run(['git', 'rm', '-rf', '.'])
print("Adding .nojekyll file to working branch")
run(['touch', '.nojekyll'])
run(['git', 'add', '.nojekyll'])
run(['git', 'commit', '-m', 'Create new {} branch with .nojekyll'.format(deploy_branch)])
if push:
print("Pushing working branch to remote {} branch".format(deploy_branch))
run(['git', 'push', '-u', 'doctr_remote', '{}:{}'.format(DOCTR_WORKING_BRANCH, deploy_branch)])
# return to master branch and clear the working branch
run(['git', 'checkout', 'master'])
run(['git', 'branch', '-D', DOCTR_WORKING_BRANCH])
# fetch the remote so that doctr_remote/{deploy_branch} is resolved
run(['git', 'fetch', 'doctr_remote'])
return True
return False | [
"\n If there is no remote branch with name specified in ``deploy_branch``,\n create one.\n\n Note that default ``deploy_branch`` is ``gh-pages`` for regular\n repos and ``master`` for ``github.io`` repos.\n\n Return True if ``deploy_branch`` was created, False if not.\n "
]
|
Please provide a description of the function:def find_sphinx_build_dir():
build = glob.glob('**/*build/html', recursive=True)
if not build:
raise RuntimeError("Could not find Sphinx build directory automatically")
build_folder = build[0]
return build_folder | [
"\n Find build subfolder within sphinx docs directory.\n\n This is called by :func:`commit_docs` if keyword arg ``built_docs`` is not\n specified on the command line.\n "
]
|
Please provide a description of the function:def copy_to_tmp(source):
tmp_dir = tempfile.mkdtemp()
# Use pathlib because os.path.basename is different depending on whether
# the path ends in a /
p = pathlib.Path(source)
dirname = p.name or 'temp'
new_dir = os.path.join(tmp_dir, dirname)
if os.path.isdir(source):
shutil.copytree(source, new_dir)
else:
shutil.copy2(source, new_dir)
return new_dir | [
"\n Copies ``source`` to a temporary directory, and returns the copied\n location.\n\n If source is a file, the copied location is also a file.\n "
]
|
Please provide a description of the function:def is_subdir(a, b):
a, b = map(os.path.abspath, [a, b])
return os.path.commonpath([a, b]) == b | [
"\n Return true if a is a subdirectory of b\n "
]
|
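Behavior on POSIX paths, assuming `is_subdir` above is in scope:

```python
assert is_subdir('/srv/docs/html', '/srv/docs')
assert not is_subdir('/srv/docs', '/srv/docs/html')
assert is_subdir('/srv/docs', '/srv/docs')  # a directory counts as its own subdir
```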
Please provide a description of the function:def sync_from_log(src, dst, log_file, exclude=()):
from os.path import join, exists, isdir
exclude = [os.path.normpath(i) for i in exclude]
added, removed = [], []
if not exists(log_file):
# Assume this is the first run
print("%s doesn't exist. Not removing any files." % log_file)
else:
with open(log_file) as f:
files = f.read().strip().split('\n')
for new_f in files:
new_f = new_f.strip()
if any(is_subdir(new_f, os.path.join(dst, i)) for i in exclude):
pass
elif exists(new_f):
os.remove(new_f)
removed.append(new_f)
else:
print("Warning: File %s doesn't exist." % new_f, file=sys.stderr)
if os.path.isdir(src):
if not src.endswith(os.sep):
src += os.sep
files = glob.iglob(join(src, '**'), recursive=True)
else:
files = [src]
src = os.path.dirname(src) + os.sep if os.sep in src else ''
os.makedirs(dst, exist_ok=True)
# sorted makes this easier to test
for f in sorted(files):
if any(is_subdir(f, os.path.join(src, i)) for i in exclude):
continue
new_f = join(dst, f[len(src):])
if isdir(f) or f.endswith(os.sep):
os.makedirs(new_f, exist_ok=True)
else:
shutil.copy2(f, new_f)
added.append(new_f)
if new_f in removed:
removed.remove(new_f)
with open(log_file, 'w') as f:
f.write('\n'.join(added))
added.append(log_file)
return added, removed | [
"\n Sync the files in ``src`` to ``dst``.\n\n The files that are synced are logged to ``log_file``. If ``log_file``\n exists, the files in ``log_file`` are removed first.\n\n Returns ``(added, removed)``, where added is a list of all files synced from\n ``src`` (even if it already existed in ``dst``), and ``removed`` is every\n file from ``log_file`` that was removed from ``dst`` because it wasn't in\n ``src``. ``added`` also includes the log file.\n\n ``exclude`` may be a list of paths from ``src`` that should be ignored.\n Such paths are neither added nor removed, even if they are in the logfile.\n "
]
|
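A minimal round trip, assuming `sync_from_log` above is in scope: the first run records what was copied, and the second run removes a file that has since disappeared from `src`:

```python
import os
import pathlib
import tempfile

tmp = pathlib.Path(tempfile.mkdtemp())
src, dst = tmp / 'src', tmp / 'dst'
src.mkdir()
(src / 'index.html').write_text('v1')
(src / 'old.html').write_text('obsolete')
log = str(tmp / '.doctr-files')

added, removed = sync_from_log(str(src), str(dst), log)
assert removed == []                      # first run: nothing to remove

(src / 'old.html').unlink()               # file disappears upstream
added, removed = sync_from_log(str(src), str(dst), log)
assert [os.path.basename(p) for p in removed] == ['old.html']
```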
Please provide a description of the function:def commit_docs(*, added, removed):
TRAVIS_BUILD_NUMBER = os.environ.get("TRAVIS_BUILD_NUMBER", "<unknown>")
TRAVIS_BRANCH = os.environ.get("TRAVIS_BRANCH", "<unknown>")
TRAVIS_COMMIT = os.environ.get("TRAVIS_COMMIT", "<unknown>")
TRAVIS_REPO_SLUG = os.environ.get("TRAVIS_REPO_SLUG", "<unknown>")
TRAVIS_JOB_WEB_URL = os.environ.get("TRAVIS_JOB_WEB_URL", "<unknown>")
TRAVIS_TAG = os.environ.get("TRAVIS_TAG", "")
branch = "tag" if TRAVIS_TAG else "branch"
DOCTR_COMMAND = ' '.join(map(shlex.quote, sys.argv))
if added:
run(['git', 'add', *added])
if removed:
run(['git', 'rm', *removed])
commit_message = .format(
branch=branch,
TRAVIS_BUILD_NUMBER=TRAVIS_BUILD_NUMBER,
TRAVIS_BRANCH=TRAVIS_BRANCH,
TRAVIS_COMMIT=TRAVIS_COMMIT,
TRAVIS_REPO_SLUG=TRAVIS_REPO_SLUG,
TRAVIS_JOB_WEB_URL=TRAVIS_JOB_WEB_URL,
DOCTR_COMMAND=DOCTR_COMMAND,
)
# Only commit if there were changes
if run(['git', 'diff-index', '--exit-code', '--cached', '--quiet', 'HEAD', '--'], exit=False) != 0:
print("Committing")
run(['git', 'commit', '-am', commit_message])
return True
return False | [
"\n Commit the docs to the current branch\n\n Assumes that :func:`setup_GitHub_push`, which sets up the ``doctr_remote``\n remote, has been run.\n\n Returns True if changes were committed and False if no changes were\n committed.\n ",
"\\\nUpdate docs after building Travis build {TRAVIS_BUILD_NUMBER} of\n{TRAVIS_REPO_SLUG}\n\nThe docs were built from the {branch} '{TRAVIS_BRANCH}' against the commit\n{TRAVIS_COMMIT}.\n\nThe Travis build that generated this commit is at\n{TRAVIS_JOB_WEB_URL}.\n\nThe doctr command that was run is\n\n {DOCTR_COMMAND}\n"
]
|
Please provide a description of the function:def push_docs(deploy_branch='gh-pages', retries=5):
code = 1
while code and retries:
print("Pulling")
code = run(['git', 'pull', '-s', 'recursive', '-X', 'ours',
'doctr_remote', deploy_branch], exit=False)
print("Pushing commit")
code = run(['git', 'push', '-q', 'doctr_remote',
'{}:{}'.format(DOCTR_WORKING_BRANCH, deploy_branch)], exit=False)
if code:
retries -= 1
print("Push failed, retrying")
time.sleep(1)
else:
return
sys.exit("Giving up...") | [
"\n Push the changes to the branch named ``deploy_branch``.\n\n Assumes that :func:`setup_GitHub_push` has been run and returned True, and\n that :func:`commit_docs` has been run. Does not push anything if no changes\n were made.\n\n "
]
|
Please provide a description of the function:def determine_push_rights(*, branch_whitelist, TRAVIS_BRANCH,
TRAVIS_PULL_REQUEST, TRAVIS_TAG, build_tags, fork):
canpush = True
if TRAVIS_TAG:
if not build_tags:
print("The docs are not pushed on tag builds. To push on future tag builds, use --build-tags")
return build_tags
if not any([re.compile(x).match(TRAVIS_BRANCH) for x in branch_whitelist]):
print("The docs are only pushed to gh-pages from master. To allow pushing from "
"a non-master branch, use the --no-require-master flag", file=sys.stderr)
print("This is the {TRAVIS_BRANCH} branch".format(TRAVIS_BRANCH=TRAVIS_BRANCH), file=sys.stderr)
canpush = False
if TRAVIS_PULL_REQUEST != "false":
print("The website and docs are not pushed to gh-pages on pull requests", file=sys.stderr)
canpush = False
if fork:
print("The website and docs are not pushed to gh-pages on fork builds.", file=sys.stderr)
canpush = False
if last_commit_by_doctr():
print(red("The last commit on this branch was pushed by doctr. Not pushing to "
"avoid an infinite build-loop."), file=sys.stderr)
canpush = False
return canpush | [
"Check if Travis is running on ``master`` (or a whitelisted branch) to\n determine if we can/should push the docs to the deploy repo\n "
]
|
Please provide a description of the function:def clean_path(p):
p = os.path.expanduser(p)
p = os.path.expandvars(p)
p = os.path.abspath(p)
return p | [
" Clean a path by expanding user and environment variables and\n ensuring absolute path.\n "
]
|
Please provide a description of the function:def guess_organization():
try:
stdout = subprocess.check_output('git config --get user.name'.split())
org = stdout.strip().decode("UTF-8")
except:
org = getpass.getuser()
if sys.version_info[0] == 2:
# only decode when python version is 2.x
org = org.decode("UTF-8")
return org | [
" Guess the organization from `git config`. If that can't be found,\n fall back to $USER environment variable.\n "
]
|
Please provide a description of the function:def load_file_template(path):
template = StringIO()
if not os.path.exists(path):
raise ValueError("path does not exist: %s" % path)
with open(clean_path(path), "rb") as infile: # opened as binary
for line in infile:
template.write(line.decode("utf-8")) # ensure utf-8
return template | [
" Load template from the specified filesystem path.\n "
]
|
Please provide a description of the function:def load_package_template(license, header=False):
content = StringIO()
filename = 'template-%s-header.txt' if header else 'template-%s.txt'
with resource_stream(__name__, filename % license) as licfile:
for line in licfile:
content.write(line.decode("utf-8")) # write utf-8 string
return content | [
" Load license template distributed with package.\n "
]
|
Please provide a description of the function:def extract_vars(template):
keys = set()
for match in re.finditer(r"\{\{ (?P<key>\w+) \}\}", template.getvalue()):
keys.add(match.groups()[0])
return sorted(list(keys)) | [
" Extract variables from template. Variables are enclosed in\n double curly braces.\n "
]
|
Please provide a description of the function:def generate_license(template, context):
out = StringIO()
content = template.getvalue()
for key in extract_vars(template):
if key not in context:
raise ValueError("%s is missing from the template context" % key)
content = content.replace("{{ %s }}" % key, context[key])
template.close() # free template memory (when is garbage collected?)
out.write(content)
return out | [
" Generate a license by extracting variables from the template and\n replacing them with the corresponding values in the given context.\n "
]
|
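A template round trip, assuming `extract_vars` and `generate_license` above are in scope:

```python
from io import StringIO

template = StringIO("Copyright (c) {{ year }} {{ organization }}")
assert extract_vars(template) == ['organization', 'year']

out = generate_license(template, {'year': '2019', 'organization': 'ACME'})
assert out.getvalue() == "Copyright (c) 2019 ACME"
```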
Please provide a description of the function:def format_license(template, lang):
if not lang:
lang = 'txt'
out = StringIO()
template.seek(0) # from the start of the buffer
out.write(LANG_CMT[LANGS[lang]][0] + u'\n')
for line in template.readlines():
out.write(LANG_CMT[LANGS[lang]][1] + u' ')
out.write(line)
out.write(LANG_CMT[LANGS[lang]][2] + u'\n')
template.close() # force garbage collector
return out | [
" Format the StringIO template object for the specified lang string;\n return the formatted StringIO object.\n "
]
|
Please provide a description of the function:def get_suffix(name):
a = name.count(".")
if a:
ext = name.split(".")[-1]
if ext in LANGS.keys():
return ext
return False
else:
return False | [
"Check if the file name has a valid suffix for formatting.\n If it has one, return the suffix; otherwise return False.\n "
]
|
Please provide a description of the function:def coerce(cls, key, value):
"Convert plain dictionaries to MutableDict."
if not isinstance(value, MutableDict):
if isinstance(value, dict):
return MutableDict(value)
# this call will raise ValueError
return Mutable.coerce(key, value)
else:
return value | []
|
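This `coerce` follows SQLAlchemy's mutable-extension pattern; SQLAlchemy's own `MutableDict` behaves the same way, so a usage sketch with it (the column type here is illustrative):

```python
from sqlalchemy import JSON, Column, Integer
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Doc(Base):
    __tablename__ = 'docs'
    id = Column(Integer, primary_key=True)
    # as_mutable installs the coerce hook: plain dicts assigned to .data
    # are wrapped so in-place mutations mark the row dirty.
    data = Column(MutableDict.as_mutable(JSON), default=dict)

assert isinstance(MutableDict.coerce('data', {'a': 1}), MutableDict)
```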
Please provide a description of the function:def _raise_for_status(response):
message = ''
if 400 <= response.status < 500:
message = '%s Client Error: %s' % (response.status, response.reason)
elif 500 <= response.status < 600:
message = '%s Server Error: %s' % (response.status, response.reason)
else:
return
if response.status == 503:
raise ConnectionError(message)
if response.headers.get("content-type", "").startswith("application/json"):
data = json.loads(response.data.decode('utf-8'))
error = data.get('error', {})
error_trace = data.get('error_trace', None)
if "results" in data:
errors = [res["error_message"] for res in data["results"]
if res.get("error_message")]
if errors:
raise ProgrammingError("\n".join(errors))
if isinstance(error, dict):
raise ProgrammingError(error.get('message', ''),
error_trace=error_trace)
raise ProgrammingError(error, error_trace=error_trace)
raise ProgrammingError(message) | [
" Make sure that only the crate.exceptions defined in the DB-API\n specification are raised. "
]
|
Please provide a description of the function:def _server_url(server):
if not _HTTP_PAT.match(server):
server = 'http://%s' % server
parsed = urlparse(server)
url = '%s://%s' % (parsed.scheme, parsed.netloc)
return url | [
"\n Normalizes a given server string to an url\n\n >>> print(_server_url('a'))\n http://a\n >>> print(_server_url('a:9345'))\n http://a:9345\n >>> print(_server_url('https://a:9345'))\n https://a:9345\n >>> print(_server_url('https://a'))\n https://a\n >>> print(_server_url('demo.crate.io'))\n http://demo.crate.io\n "
]
|
Please provide a description of the function:def request(self,
method,
path,
data=None,
stream=False,
headers=None,
username=None,
password=None,
schema=None,
**kwargs):
if headers is None:
headers = {}
if 'Content-Length' not in headers:
length = super_len(data)
if length is not None:
headers['Content-Length'] = length
# Authentication credentials
if username is not None:
if 'Authorization' not in headers and username is not None:
credentials = username + ':'
if password is not None:
credentials += password
headers['Authorization'] = 'Basic %s' % b64encode(credentials.encode('utf-8')).decode('utf-8')
# For backwards compatibility with Crate <= 2.2
if 'X-User' not in headers:
headers['X-User'] = username
if schema is not None:
headers['Default-Schema'] = schema
headers['Accept'] = 'application/json'
headers['Content-Type'] = 'application/json'
kwargs['assert_same_host'] = False
kwargs['redirect'] = False
kwargs['retries'] = Retry(read=0)
return self.pool.urlopen(
method,
path,
body=data,
preload_content=not stream,
headers=headers,
**kwargs
) | [
"Send a request\n\n Always set the Content-Length and the Content-Type header.\n "
]
|
Please provide a description of the function:def sql(self, stmt, parameters=None, bulk_parameters=None):
if stmt is None:
return None
data = _create_sql_payload(stmt, parameters, bulk_parameters)
logger.debug(
'Sending request to %s with payload: %s', self.path, data)
content = self._json_request('POST', self.path, data=data)
logger.debug("JSON response for stmt(%s): %s", stmt, content)
return content | [
"\n Execute SQL stmt against the crate server.\n "
]
|
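A hedged sketch of the low-level call (client is an instance of this class, pointed at a reachable CrateDB node; the response keys mirror the ones consumed by the cursor code later in this module):

    result = client.sql("SELECT name FROM sys.cluster")
    print(result["cols"])  # ['name']
    print(result["rows"])  # e.g. [['crate']]

    # positional parameters travel separately from the statement
    result = client.sql("SELECT * FROM locations WHERE name = ?",
                        ('Outer Eastern Rim',))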
Please provide a description of the function:def blob_put(self, table, digest, data):
response = self._request('PUT', _blob_path(table, digest),
data=data)
if response.status == 201:
# blob created
return True
if response.status == 409:
# blob exists
return False
if response.status in (400, 404):
raise BlobLocationNotFoundException(table, digest)
_raise_for_status(response) | [
"\n Stores the contents of the file like @data object in a blob under the\n given table and digest.\n "
]
|
Please provide a description of the function:def blob_get(self, table, digest, chunk_size=1024 * 128):
response = self._request('GET', _blob_path(table, digest), stream=True)
if response.status == 404:
raise DigestNotFoundException(table, digest)
_raise_for_status(response)
return response.stream(amt=chunk_size) | [
"\n Returns a file like object representing the contents of the blob\n with the given digest.\n "
]
|
Please provide a description of the function:def blob_exists(self, table, digest):
response = self._request('HEAD', _blob_path(table, digest))
if response.status == 200:
return True
elif response.status == 404:
return False
_raise_for_status(response) | [
"\n Returns true if the blob with the given digest exists\n under the given table.\n "
]
|
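The three client-level blob calls compose like this (sketch; the table name is made up, and the digest must be the SHA-1 hex digest of the payload):

    import hashlib
    from io import BytesIO

    payload = b"hello blob"
    digest = hashlib.sha1(payload).hexdigest()

    created = client.blob_put('myblobs', digest, BytesIO(payload))  # True if new, False if it existed
    if client.blob_exists('myblobs', digest):
        data = b"".join(client.blob_get('myblobs', digest))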
Please provide a description of the function:def _request(self, method, path, server=None, **kwargs):
while True:
next_server = server or self._get_server()
try:
response = self.server_pool[next_server].request(
method, path, username=self.username, password=self.password, schema=self.schema, **kwargs)
redirect_location = response.get_redirect_location()
if redirect_location and 300 <= response.status <= 308:
redirect_server = _server_url(redirect_location)
self._add_server(redirect_server)
return self._request(
method, path, server=redirect_server, **kwargs)
if not server and response.status in SRV_UNAVAILABLE_STATUSES:
with self._lock:
# drop server from active ones
self._drop_server(next_server, response.reason)
else:
return response
except (urllib3.exceptions.MaxRetryError,
urllib3.exceptions.ReadTimeoutError,
urllib3.exceptions.SSLError,
urllib3.exceptions.HTTPError,
urllib3.exceptions.ProxyError,) as ex:
ex_message = _ex_to_message(ex)
if server:
raise ConnectionError(
"Server not available, exception: %s" % ex_message
)
preserve_server = False
if isinstance(ex, urllib3.exceptions.ProtocolError):
preserve_server = any(
t in [type(arg) for arg in ex.args]
for t in PRESERVE_ACTIVE_SERVER_EXCEPTIONS
)
if (not preserve_server):
with self._lock:
# drop server from active ones
self._drop_server(next_server, ex_message)
except Exception as e:
raise ProgrammingError(_ex_to_message(e)) | [
"Execute a request to the cluster\n\n A server is selected from the server pool.\n "
]
|
Please provide a description of the function:def _json_request(self, method, path, data):
response = self._request(method, path, data=data)
_raise_for_status(response)
if len(response.data) > 0:
return _json_from_response(response)
return response.data | [
"\n Issue request against the crate HTTP API.\n "
]
|
Please provide a description of the function:def _get_server(self):
with self._lock:
inactive_server_count = len(self._inactive_servers)
for i in range(inactive_server_count):
try:
ts, server, message = heapq.heappop(self._inactive_servers)
except IndexError:
pass
else:
if (ts + self.retry_interval) > time():
# Not yet, put it back
heapq.heappush(self._inactive_servers,
(ts, server, message))
else:
self._active_servers.append(server)
logger.warn("Restored server %s into active pool",
server)
# if none is old enough, use oldest
if not self._active_servers:
ts, server, message = heapq.heappop(self._inactive_servers)
self._active_servers.append(server)
logger.info("Restored server %s into active pool", server)
server = self._active_servers[0]
self._roundrobin()
return server | [
"\n Get server to use for request.\n Also process inactive server list, re-add them after given interval.\n "
]
|
Please provide a description of the function:def _drop_server(self, server, message):
try:
self._active_servers.remove(server)
except ValueError:
pass
else:
heapq.heappush(self._inactive_servers, (time(), server, message))
logger.warning("Removed server %s from active pool", server)
# if this is the last server raise exception, otherwise try next
if not self._active_servers:
raise ConnectionError(
("No more Servers available, "
"exception from last server: %s") % message) | [
"\n Drop server from active list and adds it to the inactive ones.\n "
]
|
Please provide a description of the function:def match(column, term, match_type=None, options=None):
return Match(column, term, match_type, options) | [
"Generates match predicate for fulltext search\n\n :param column: A reference to a column or an index, or a subcolumn, or a\n dictionary of subcolumns with boost values.\n\n :param term: The term to match against. This string is analyzed and the\n resulting tokens are compared to the index.\n\n :param match_type (optional): The match type. Determine how the term is\n applied and the score calculated.\n\n :param options (optional): The match options. Specify match type behaviour.\n (Not possible without a specified match type.) Match options must be\n supplied as a dictionary.\n "
]
|
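A usage sketch with the SQLAlchemy layer (Character is a hypothetical mapped class with a fulltext-indexed column name_ft; the import path follows this package's layout but treat it as an assumption):

    from crate.client.sqlalchemy.predicates import match

    # simple term match
    query = session.query(Character.name).filter(
        match(Character.name_ft, 'Trillian'))

    # explicit match type plus options
    query = session.query(Character.name).filter(
        match(Character.name_ft, 'Trillian',
              match_type='phrase',
              options={'fuzziness': 3}))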
Please provide a description of the function:def put(self, f, digest=None):
if digest:
actual_digest = digest
else:
actual_digest = self._compute_digest(f)
created = self.conn.client.blob_put(self.container_name,
actual_digest, f)
if digest:
return created
return actual_digest | [
"\n Upload a blob\n\n :param f:\n File object to be uploaded (required to support seek if digest is\n not provided).\n :param digest:\n Optional SHA-1 hex digest of the file contents. Gets computed\n before actual upload if not provided, which requires an extra file\n read.\n :return:\n The hex digest of the uploaded blob if not provided in the call.\n Otherwise a boolean indicating if the blob has been newly created.\n "
]
|
Please provide a description of the function:def get(self, digest, chunk_size=1024 * 128):
return self.conn.client.blob_get(self.container_name, digest,
chunk_size) | [
"\n Return the contents of a blob\n\n :param digest: the hex digest of the blob to return\n :param chunk_size: the size of the chunks returned on each iteration\n :return: generator returning chunks of data\n "
]
|
Please provide a description of the function:def delete(self, digest):
return self.conn.client.blob_del(self.container_name, digest) | [
"\n Delete a blob\n\n :param digest: the hex digest of the blob to be deleted\n :return: True if blob existed\n "
]
|
Please provide a description of the function:def exists(self, digest):
return self.conn.client.blob_exists(self.container_name, digest) | [
"\n Check if a blob exists\n\n :param digest: Hex digest of the blob\n :return: Boolean indicating existence of the blob\n "
]
|
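At the container level the same round trip reads like this (sketch; assumes the connection exposes a get_blob_container accessor returning one of these containers):

    from io import BytesIO

    container = connection.get_blob_container('myblobs')
    digest = container.put(BytesIO(b"hello blob"))  # no digest given, so it is computed and returned
    assert container.exists(digest)
    data = b"".join(container.get(digest))
    container.delete(digest)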
Please provide a description of the function:def execute(self, sql, parameters=None, bulk_parameters=None):
if self.connection._closed:
raise ProgrammingError("Connection closed")
if self._closed:
raise ProgrammingError("Cursor closed")
self._result = self.connection.client.sql(sql, parameters,
bulk_parameters)
if "rows" in self._result:
self.rows = iter(self._result["rows"]) | [
"\n Prepare and execute a database operation (query or command).\n "
]
|
Please provide a description of the function:def executemany(self, sql, seq_of_parameters):
row_counts = []
durations = []
if self.connection.lowest_server_version >= BULK_INSERT_MIN_VERSION:
self.execute(sql, bulk_parameters=seq_of_parameters)
for result in self._result.get('results', []):
if result.get('rowcount') > -1:
row_counts.append(result.get('rowcount'))
if self.duration > -1:
durations.append(self.duration)
else:
for params in seq_of_parameters:
self.execute(sql, parameters=params)
if self.rowcount > -1:
row_counts.append(self.rowcount)
if self.duration > -1:
durations.append(self.duration)
self._result = {
"rowcount": sum(row_counts) if row_counts else -1,
"duration": sum(durations) if durations else -1,
"rows": [],
"cols": self._result.get("cols", []),
"results": self._result.get("results")
}
self.rows = iter(self._result["rows"])
return self._result["results"] | [
"\n Prepare a database operation (query or command) and then execute it\n against all parameter sequences or mappings found in the sequence\n ``seq_of_parameters``.\n "
]
|
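A DB-API style sketch tying execute and executemany together (table and rows are made up; connect is the factory defined later in this module):

    connection = connect(['localhost:4200'])
    cursor = connection.cursor()

    cursor.execute("INSERT INTO t (id, name) VALUES (?, ?)", (1, 'foo'))
    cursor.executemany(
        "INSERT INTO t (id, name) VALUES (?, ?)",
        [(2, 'bar'), (3, 'baz')])
    print(cursor.rowcount)  # summed row counts, or -1 if unknown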
Please provide a description of the function:def fetchmany(self, count=None):
if count is None:
count = self.arraysize
if count == 0:
return self.fetchall()
result = []
for i in range(count):
try:
result.append(self.next())
except StopIteration:
pass
return result | [
"\n Fetch the next set of rows of a query result, returning a sequence of\n sequences (e.g. a list of tuples). An empty sequence is returned when\n no more rows are available.\n "
]
|
Please provide a description of the function:def fetchall(self):
result = []
iterate = True
while iterate:
try:
result.append(self.next())
except StopIteration:
iterate = False
return result | [
"\n Fetch all (remaining) rows of a query result, returning them as a\n sequence of sequences (e.g. a list of tuples). Note that the cursor's\n arraysize attribute can affect the performance of this operation.\n "
]
|
Please provide a description of the function:def rowcount(self):
if (self._closed or not self._result or "rows" not in self._result):
return -1
return self._result.get("rowcount", -1) | [
"\n This read-only attribute specifies the number of rows that the last\n .execute*() produced (for DQL statements like ``SELECT``) or affected\n (for DML statements like ``UPDATE`` or ``INSERT``).\n "
]
|
Please provide a description of the function:def next(self):
if self.rows is None:
raise ProgrammingError(
"No result available. " +
"execute() or executemany() must be called first."
)
elif not self._closed:
return next(self.rows)
else:
raise ProgrammingError("Cursor closed") | [
"\n Return the next row of a query result set, respecting if cursor was\n closed.\n "
]
|
Please provide a description of the function:def description(self):
if self._closed:
return
description = []
for col in self._result["cols"]:
description.append((col,
None,
None,
None,
None,
None,
None))
return tuple(description) | [
"\n This read-only attribute is a sequence of 7-item sequences.\n "
]
|
Please provide a description of the function:def duration(self):
if self._closed or \
not self._result or \
"duration" not in self._result:
return -1
return self._result.get("duration", 0) | [
"\n This read-only attribute specifies the server-side duration of a query\n in milliseconds.\n "
]
|
Please provide a description of the function:def connect(servers=None,
timeout=None,
client=None,
verify_ssl_cert=False,
ca_cert=None,
error_trace=False,
cert_file=None,
key_file=None,
username=None,
password=None,
schema=None):
return Connection(servers=servers,
timeout=timeout,
client=client,
verify_ssl_cert=verify_ssl_cert,
ca_cert=ca_cert,
error_trace=error_trace,
cert_file=cert_file,
key_file=key_file,
username=username,
password=password,
schema=schema) | [
" Create a :class:Connection object\n\n :param servers:\n either a string in the form of '<hostname>:<port>'\n or a list of servers in the form of ['<hostname>:<port>', '...']\n :param timeout:\n (optional)\n define the retry timeout for unreachable servers in seconds\n :param client:\n (optional - for testing)\n client used to communicate with crate.\n :param verify_ssl_cert:\n if set to ``True`` verify the servers SSL server certificate.\n defaults to ``False``\n :param ca_cert:\n a path to a CA certificate to use when verifying the SSL server\n certificate.\n :param error_trace:\n if set to ``True`` return a whole stacktrace of any server error if\n one occurs\n :param cert_file:\n a path to the client certificate to present to the server.\n :param key_file:\n a path to the client key to use when communicating with the server.\n :param username:\n the username in the database.\n :param password:\n the password of the user in the database.\n\n >>> connect(['host1:4200', 'host2:4200'])\n <Connection <Client ['http://host1:4200', 'http://host2:4200']>>\n "
]
|
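A fuller call exercising the documented keyword arguments (all values are illustrative):

    connection = connect(
        ['https://crate1.example.com:4200', 'https://crate2.example.com:4200'],
        timeout=30,
        verify_ssl_cert=True,
        ca_cert='/path/to/ca.pem',
        username='crate',
        password='secret',
        schema='doc')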
Please provide a description of the function:def rewrite_update(clauseelement, multiparams, params):
newmultiparams = []
_multiparams = multiparams[0]
if len(_multiparams) == 0:
return clauseelement, multiparams, params
for _params in _multiparams:
newparams = {}
for key, val in _params.items():
if (
not isinstance(val, MutableDict) or
(not any(val._changed_keys) and not any(val._deleted_keys))
):
newparams[key] = val
continue
for subkey, subval in val.items():
if subkey in val._changed_keys:
newparams["{0}['{1}']".format(key, subkey)] = subval
for subkey in val._deleted_keys:
newparams["{0}['{1}']".format(key, subkey)] = None
newmultiparams.append(newparams)
_multiparams = (newmultiparams, )
clause = clauseelement.values(newmultiparams[0])
clause._crate_specific = True
return clause, _multiparams, params | [
" change the params to enable partial updates\n\n sqlalchemy by default only supports updates of complex types in the form of\n\n \"col = ?\", ({\"x\": 1, \"y\": 2}\n\n but crate supports\n\n \"col['x'] = ?, col['y'] = ?\", (1, 2)\n\n by using the `Craty` (`MutableDict`) type.\n The update statement is only rewritten if an item of the MutableDict was\n changed.\n "
]
|
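Conceptually the rewrite narrows a whole-object assignment down to the touched keys; a before/after sketch (column and values made up):

    # ORM side: only data['x'] was changed on a Craty/MutableDict column
    #   user.data['x'] = 1
    #   session.commit()
    #
    # without the rewrite:  UPDATE users SET data = ?        -- ({'x': 1, 'y': 2},)
    # with the rewrite:     UPDATE users SET data['x'] = ?   -- (1,)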
Please provide a description of the function:def visit_insert(self, insert_stmt, asfrom=False, **kw):
self.stack.append(
{'correlate_froms': set(),
"asfrom_froms": set(),
"selectable": insert_stmt})
self.isinsert = True
crud_params = crud._get_crud_params(self, insert_stmt, **kw)
if not crud_params and \
not self.dialect.supports_default_values and \
not self.dialect.supports_empty_insert:
raise NotImplementedError(
"The '%s' dialect with current database version settings does "
"not support empty inserts." % self.dialect.name)
if insert_stmt._has_multi_parameters:
if not self.dialect.supports_multivalues_insert:
raise NotImplementedError(
"The '%s' dialect with current database "
"version settings does not support "
"in-place multirow inserts." % self.dialect.name)
crud_params_single = crud_params[0]
else:
crud_params_single = crud_params
preparer = self.preparer
supports_default_values = self.dialect.supports_default_values
text = "INSERT "
if insert_stmt._prefixes:
text += self._generate_prefixes(insert_stmt,
insert_stmt._prefixes, **kw)
text += "INTO "
table_text = preparer.format_table(insert_stmt.table)
if insert_stmt._hints:
dialect_hints = dict([
(table, hint_text)
for (table, dialect), hint_text in
insert_stmt._hints.items()
if dialect in ('*', self.dialect.name)
])
if insert_stmt.table in dialect_hints:
table_text = self.format_from_hint_text(
table_text,
insert_stmt.table,
dialect_hints[insert_stmt.table],
True
)
text += table_text
if crud_params_single or not supports_default_values:
text += " (%s)" % ', '.join([preparer.format_column(c[0])
for c in crud_params_single])
if self.returning or insert_stmt._returning:
self.returning = self.returning or insert_stmt._returning
returning_clause = self.returning_clause(
insert_stmt, self.returning)
if self.returning_precedes_values:
text += " " + returning_clause
if insert_stmt.select is not None:
text += " (%s)" % self.process(self._insert_from_select, **kw)
elif not crud_params and supports_default_values:
text += " DEFAULT VALUES"
elif insert_stmt._has_multi_parameters:
text += " VALUES %s" % (
", ".join(
"(%s)" % (
', '.join(c[1] for c in crud_param_set)
)
for crud_param_set in crud_params
)
)
else:
text += " VALUES (%s)" % \
', '.join([c[1] for c in crud_params])
if self.returning and not self.returning_precedes_values:
text += " " + returning_clause
self.stack.pop(-1)
return text | [
"\n used to compile <sql.expression.Insert> expressions.\n\n this function wraps insert_from_select statements inside\n parentheses to be conform with earlier versions of CreateDB.\n "
]
|
Please provide a description of the function:def visit_update(self, update_stmt, **kw):
if not update_stmt.parameters and \
not hasattr(update_stmt, '_crate_specific'):
return super(CrateCompiler, self).visit_update(update_stmt, **kw)
self.isupdate = True
extra_froms = update_stmt._extra_froms
text = 'UPDATE '
if update_stmt._prefixes:
text += self._generate_prefixes(update_stmt,
update_stmt._prefixes, **kw)
table_text = self.update_tables_clause(update_stmt, update_stmt.table,
extra_froms, **kw)
dialect_hints = None
if update_stmt._hints:
dialect_hints, table_text = self._setup_crud_hints(
update_stmt, table_text
)
crud_params = self._get_crud_params(update_stmt, **kw)
text += table_text
text += ' SET '
include_table = extra_froms and \
self.render_table_with_column_in_update_from
set_clauses = []
for k, v in crud_params:
clause = k._compiler_dispatch(self,
include_table=include_table) + \
' = ' + v
set_clauses.append(clause)
for k, v in update_stmt.parameters.items():
if isinstance(k, str) and '[' in k:
bindparam = sa.sql.bindparam(k, v)
set_clauses.append(k + ' = ' + self.process(bindparam))
text += ', '.join(set_clauses)
if self.returning or update_stmt._returning:
if not self.returning:
self.returning = update_stmt._returning
if self.returning_precedes_values:
text += " " + self.returning_clause(
update_stmt, self.returning)
if extra_froms:
extra_from_text = self.update_from_clause(
update_stmt,
update_stmt.table,
extra_froms,
dialect_hints,
**kw)
if extra_from_text:
text += " " + extra_from_text
if update_stmt._whereclause is not None:
t = self.process(update_stmt._whereclause)
if t:
text += " WHERE " + t
limit_clause = self.update_limit_clause(update_stmt)
if limit_clause:
text += " " + limit_clause
if self.returning and not self.returning_precedes_values:
text += " " + self.returning_clause(
update_stmt, self.returning)
return text | [
"\n used to compile <sql.expression.Update> expressions\n Parts are taken from the SQLCompiler base class.\n "
]
|
Please provide a description of the function:def _get_crud_params(compiler, stmt, **kw):
compiler.postfetch = []
compiler.insert_prefetch = []
compiler.update_prefetch = []
compiler.returning = []
# no parameters in the statement, no parameters in the
# compiled params - return binds for all columns
if compiler.column_keys is None and stmt.parameters is None:
return [(c, crud._create_bind_param(compiler, c, None,
required=True))
for c in stmt.table.columns]
if stmt._has_multi_parameters:
stmt_parameters = stmt.parameters[0]
else:
stmt_parameters = stmt.parameters
# getters - these are normally just column.key,
# but in the case of mysql multi-table update, the rules for
# .key must conditionally take tablename into account
if SA_VERSION >= SA_1_1:
_column_as_key, _getattr_col_key, _col_bind_name = \
crud._key_getters_for_crud_column(compiler, stmt)
else:
_column_as_key, _getattr_col_key, _col_bind_name = \
crud._key_getters_for_crud_column(compiler)
# if we have statement parameters - set defaults in the
# compiled params
if compiler.column_keys is None:
parameters = {}
else:
parameters = dict((_column_as_key(key), crud.REQUIRED)
for key in compiler.column_keys
if not stmt_parameters or
key not in stmt_parameters)
# create a list of column assignment clauses as tuples
values = []
if stmt_parameters is not None:
crud._get_stmt_parameters_params(
compiler,
parameters, stmt_parameters, _column_as_key, values, kw)
check_columns = {}
crud._scan_cols(compiler, stmt, parameters,
_getattr_col_key, _column_as_key,
_col_bind_name, check_columns, values, kw)
if stmt._has_multi_parameters:
values = crud._extend_values_for_multiparams(compiler, stmt,
values, kw)
return values | [
" extract values from crud parameters\n\n taken from SQLAlchemy's crud module (since 1.0.x) and\n adapted for Crate dialect"
]
|
Please provide a description of the function:def get_tgt_for(user):
if not settings.CAS_PROXY_CALLBACK:
raise CasConfigException("No proxy callback set in settings")
try:
return Tgt.objects.get(username=user.username)
except ObjectDoesNotExist:
logger.warning('No ticket found for user {user}'.format(
user=user.username
))
raise CasTicketException("no ticket found for user " + user.username) | [
"\n Fetch a ticket granting ticket for a given user.\n\n :param user: UserObj\n\n :return: TGT or Exepction\n "
]
|
Please provide a description of the function:def delete_old_tickets(**kwargs):
sender = kwargs.get('sender', None)
now = datetime.now()
expire = now - timedelta(days=2)  # assumes timedelta is imported; the original day-of-month arithmetic broke near month boundaries
sender.objects.filter(created__lt=expire).delete() | [
"\n Delete tickets if they are over 2 days old\n kwargs = ['raw', 'signal', 'instance', 'sender', 'created']\n\n "
]
|
Please provide a description of the function:def get_proxy_ticket_for(self, service):
if not settings.CAS_PROXY_CALLBACK:
raise CasConfigException("No proxy callback set in settings")
params = {'pgt': self.tgt, 'targetService': service}
url = (urljoin(settings.CAS_SERVER_URL, 'proxy') + '?' +
urlencode(params))
page = urlopen(url)
try:
response = page.read()
tree = ElementTree.fromstring(response)
if tree[0].tag.endswith('proxySuccess'):
return tree[0][0].text
else:
logger.warning('Failed to get proxy ticket')
raise CasTicketException('Failed to get proxy ticket: %s' % \
tree[0].text.strip())
finally:
page.close() | [
"\n Verifies CAS 2.0+ XML-based authentication ticket.\n\n :param: service\n\n Returns username on success and None on failure.\n "
]
|
Please provide a description of the function:def _verify_cas1(ticket, service):
params = {'ticket': ticket, 'service': service}
url = (urljoin(settings.CAS_SERVER_URL, 'validate') + '?' +
urlencode(params))
page = urlopen(url)
try:
verified = page.readline().strip()
if verified == 'yes':
return page.readline().strip()
else:
return None
finally:
page.close() | [
"\n Verifies CAS 1.0 authentication ticket.\n\n :param: ticket\n :param: service\n\n Returns username on success and None on failure.\n "
]
|
Please provide a description of the function:def _internal_verify_cas(ticket, service, suffix):
params = {'ticket': ticket, 'service': service}
if settings.CAS_PROXY_CALLBACK:
params['pgtUrl'] = settings.CAS_PROXY_CALLBACK
url = (urljoin(settings.CAS_SERVER_URL, suffix) + '?' +
urlencode(params))
page = urlopen(url)
username = None
try:
response = page.read()
tree = ElementTree.fromstring(response)
document = minidom.parseString(response)
if tree[0].tag.endswith('authenticationSuccess'):
if settings.CAS_RESPONSE_CALLBACKS:
cas_response_callbacks(tree)
username = tree[0][0].text
pgt_el = document.getElementsByTagName('cas:proxyGrantingTicket')
if pgt_el:
pgt = pgt_el[0].firstChild.nodeValue
try:
pgtIou = _get_pgtiou(pgt)
tgt = Tgt.objects.get(username=username)
tgt.tgt = pgtIou.tgt
tgt.save()
pgtIou.delete()
except Tgt.DoesNotExist:
Tgt.objects.create(username=username, tgt=pgtIou.tgt)
logger.info('Creating TGT ticket for {user}'.format(
user=username
))
pgtIou.delete()
except Exception as e:
logger.warning('Failed to do proxy authentication. {message}'.format(
message=e
))
else:
failure = document.getElementsByTagName('cas:authenticationFailure')
if failure:
logger.warning('Authentication failed from CAS server: %s',
failure[0].firstChild.nodeValue)
except Exception as e:
logger.error('Failed to verify CAS authentication: {message}'.format(
message=e
))
finally:
page.close()
return username | [
"Verifies CAS 2.0 and 3.0 XML-based authentication ticket.\n\n Returns username on success and None on failure.\n "
]
|
Please provide a description of the function:def verify_proxy_ticket(ticket, service):
params = {'ticket': ticket, 'service': service}
url = (urljoin(settings.CAS_SERVER_URL, 'proxyValidate') + '?' +
urlencode(params))
page = urlopen(url)
try:
response = page.read()
tree = ElementTree.fromstring(response)
if tree[0].tag.endswith('authenticationSuccess'):
username = tree[0][0].text
proxies = []
if len(tree[0]) > 1:
for element in tree[0][1]:
proxies.append(element.text)
return {"username": username, "proxies": proxies}
else:
return None
finally:
page.close() | [
"\n Verifies CAS 2.0+ XML-based proxy ticket.\n\n :param: ticket\n :param: service\n\n Returns username on success and None on failure.\n "
]
|
Please provide a description of the function:def _get_pgtiou(pgt):
pgtIou = None
retries_left = 5
if not settings.CAS_PGT_FETCH_WAIT:
retries_left = 1
while not pgtIou and retries_left:
try:
return PgtIOU.objects.get(tgt=pgt)
except PgtIOU.DoesNotExist:
if settings.CAS_PGT_FETCH_WAIT:
time.sleep(1)
retries_left -= 1
logger.info('Did not fetch ticket, trying again. {tries} tries left.'.format(
tries=retries_left
))
raise CasTicketException("Could not find pgtIou for pgt %s" % pgt) | [
"\n Returns a PgtIOU object given a pgt.\n\n The PgtIOU (tgt) is set by the CAS server in a different request\n that has completed before this call, however, it may not be found in\n the database by this calling thread, hence the attempt to get the\n ticket is retried for up to 5 seconds. This should be handled some\n better way.\n\n Users can opt out of this waiting period by setting CAS_PGT_FETCH_WAIT = False\n\n :param: pgt\n\n "
]
|
Please provide a description of the function:def authenticate(self, request, ticket, service):
User = get_user_model()
username = _verify(ticket, service)
if not username:
return None
try:
user = User.objects.get(username__iexact=username)
except User.DoesNotExist:
# user will have an "unusable" password
if settings.CAS_AUTO_CREATE_USER:
user = User.objects.create_user(username, '')
user.save()
else:
user = None
return user | [
"\n Verifies CAS ticket and gets or creates User object\n NB: Use of PT to identify proxy\n "
]
|
Please provide a description of the function:def gateway():
if settings.CAS_GATEWAY == False:
raise ImproperlyConfigured('CAS_GATEWAY must be set to True')
def wrap(func):
def wrapped_f(*args):
from cas.views import login
request = args[0]
try:
# use callable for pre-django 2.0
is_authenticated = request.user.is_authenticated()
except TypeError:
is_authenticated = request.user.is_authenticated
if is_authenticated:
# Is Authed, fine
pass
else:
path_with_params = request.path + '?' + urlencode(request.GET.copy())
if request.GET.get('ticket'):
# Not Authed, but have a ticket!
# Try to authenticate
response = login(request, path_with_params, False, True)
if isinstance(response, HttpResponseRedirect):
# For certain instances where a forbidden occurs, we need to pass instead of return a response.
return response
else:
#Not Authed, but no ticket
gatewayed = request.GET.get('gatewayed')
if gatewayed == 'true':
pass
else:
# Not Authed, try to authenticate
response = login(request, path_with_params, False, True)
if isinstance(response, HttpResponseRedirect):
return response
return func(*args)
return wrapped_f
return wrap | [
"\n Authenticates single sign on session if ticket is available,\n but doesn't redirect to sign in url otherwise.\n "
]
|
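A usage sketch for the decorator on a Django view (requires CAS_GATEWAY = True in settings; the view body is a placeholder):

    from django.http import HttpResponse

    @gateway()
    def newsfeed(request):
        # request.user is populated when a CAS single sign-on session
        # exists; otherwise the page still renders for anonymous users
        return HttpResponse("hello")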
Please provide a description of the function:def _service_url(request, redirect_to=None, gateway=False):
if settings.CAS_FORCE_SSL_SERVICE_URL:
protocol = 'https://'
else:
protocol = ('http://', 'https://')[request.is_secure()]
host = request.get_host()
service = protocol + host + request.path
if redirect_to:
if '?' in service:
service += '&'
else:
service += '?'
if gateway:
gateway_params = [(REDIRECT_FIELD_NAME, redirect_to), ('gatewayed', 'true')]
query_dict = request.GET.copy()
try:
del query_dict['ticket']
except KeyError:
pass
query_list = list(query_dict.items())  # list() so it can be concatenated with gateway_params on Python 3
# remove duplicate params
for item in query_list:
for index, item2 in enumerate(gateway_params):
if item[0] == item2[0]:
gateway_params.pop(index)
extra_params = gateway_params + query_list
#Sort params by key name so they are always in the same order.
sorted_params = sorted(extra_params, key=itemgetter(0))
service += urlencode(sorted_params)
else:
service += urlencode({REDIRECT_FIELD_NAME: redirect_to})
return service | [
"\n Generates application service URL for CAS\n\n :param: request Request Object\n :param: redirect_to URL to redriect to\n :param: gateway Should this be a gatewayed pass through\n\n ",
" If gateway, capture params and reencode them before returning a url "
]
|
Please provide a description of the function:def _redirect_url(request):
next = request.GET.get(REDIRECT_FIELD_NAME)
if not next:
if settings.CAS_IGNORE_REFERER:
next = settings.CAS_REDIRECT_URL
else:
next = request.META.get('HTTP_REFERER', settings.CAS_REDIRECT_URL)
host = request.get_host()
prefix = (('http://', 'https://')[request.is_secure()] + host)
if next.startswith(prefix):
next = next[len(prefix):]
return next | [
"\n Redirects to referring page, or CAS_REDIRECT_URL if no referrer is\n set.\n\n :param: request RequestObj\n\n "
]
|
Please provide a description of the function:def _login_url(service, ticket='ST', gateway=False):
LOGINS = {'ST': 'login',
'PT': 'proxyValidate'}
if gateway:
params = {'service': service, 'gateway': 'true'}
else:
params = {'service': service}
if settings.CAS_EXTRA_LOGIN_PARAMS:
params.update(settings.CAS_EXTRA_LOGIN_PARAMS)
if not ticket:
ticket = 'ST'
login_type = LOGINS.get(ticket[:2], 'login')
return urlparse.urljoin(settings.CAS_SERVER_URL, login_type) + '?' + urlencode(params) | [
"\n Generates CAS login URL\n\n :param: service Service URL\n :param: ticket Ticket\n :param: gateway Gatewayed\n\n "
]
|
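An illustrative expansion of the builder (assuming CAS_SERVER_URL = 'https://cas.example.com/' and no CAS_EXTRA_LOGIN_PARAMS):

    # _login_url('https://app.example.com/page/')
    # -> 'https://cas.example.com/login?service=https%3A%2F%2Fapp.example.com%2Fpage%2F'

    # a 'PT' ticket routes to proxyValidate, and gateway adds a flag
    # _login_url(service, ticket='PT-1234-abc', gateway=True)
    # -> 'https://cas.example.com/proxyValidate?service=...&gateway=true'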
Please provide a description of the function:def _logout_url(request, next_page=None):
url = urlparse.urljoin(settings.CAS_SERVER_URL, 'logout')
if next_page and getattr(settings, 'CAS_PROVIDE_URL_TO_LOGOUT', True):
parsed_url = urlparse.urlparse(next_page)
if parsed_url.scheme: #If next_page is a protocol-rooted url, skip redirect url construction
url += '?' + urlencode({'service': next_page})
else:
protocol = ('http://', 'https://')[request.is_secure()]
host = request.get_host()
url += '?' + urlencode({'service': protocol + host + next_page})
return url | [
"\n Generates CAS logout URL\n\n :param: request RequestObj\n :param: next_page Page to redirect after logout.\n\n "
]
|
Please provide a description of the function:def login(request, next_page=None, required=False, gateway=False):
if not next_page:
next_page = _redirect_url(request)
try:
# use callable for pre-django 2.0
is_authenticated = request.user.is_authenticated()
except TypeError:
is_authenticated = request.user.is_authenticated
if is_authenticated:
return HttpResponseRedirect(next_page)
ticket = request.GET.get('ticket')
if gateway:
service = _service_url(request, next_page, True)
else:
service = _service_url(request, next_page, False)
if ticket:
user = auth.authenticate(ticket=ticket, service=service)
if user is not None:
auth.login(request, user)
if settings.CAS_PROXY_CALLBACK:
proxy_callback(request)
return HttpResponseRedirect(next_page)
elif settings.CAS_RETRY_LOGIN or required:
if gateway:
return HttpResponseRedirect(_login_url(service, ticket, True))
else:
return HttpResponseRedirect(_login_url(service, ticket, False))
else:
logger.warning('User has a valid ticket but not a valid session')
# Has ticket, not session
if gateway:
# Gatewayed responses should not redirect.
return False
if getattr(settings, 'CAS_CUSTOM_FORBIDDEN'):
return HttpResponseRedirect(reverse(settings.CAS_CUSTOM_FORBIDDEN) + "?" + request.META['QUERY_STRING'])
else:
error = "<h1>Forbidden</h1><p>Login failed.</p>"
return HttpResponseForbidden(error)
else:
if gateway:
return HttpResponseRedirect(_login_url(service, ticket, True))
else:
return HttpResponseRedirect(_login_url(service, ticket, False)) | [
"\n Forwards to CAS login URL or verifies CAS ticket\n\n :param: request RequestObj\n :param: next_page Next page to redirect after login\n :param: required\n :param: gateway Gatewayed response\n\n "
]
|
Please provide a description of the function:def logout(request, next_page=None):
auth.logout(request)
if not next_page:
next_page = _redirect_url(request)
if settings.CAS_LOGOUT_COMPLETELY:
return HttpResponseRedirect(_logout_url(request, next_page))
else:
return HttpResponseRedirect(next_page) | [
"\n Redirects to CAS logout page\n\n :param: request RequestObj\n :param: next_page Page to redirect to\n\n "
]
|
Please provide a description of the function:def proxy_callback(request):
pgtIou = request.GET.get('pgtIou')
tgt = request.GET.get('pgtId')
if not (pgtIou and tgt):
logger.info('No pgtIou or tgt found in request.GET')
return HttpResponse('No pgtIOO', content_type="text/plain")
try:
PgtIOU.objects.create(tgt=tgt, pgtIou=pgtIou, created=datetime.datetime.now())
request.session['pgt-TICKET'] = pgtIou
return HttpResponse('PGT ticket is: {ticket}'.format(ticket=pgtIou), content_type="text/plain")
except Exception as e:
logger.warning('PGT storage failed. {message}'.format(
message=e
))
return HttpResponse('PGT storage failed for {request}'.format(request=str(request.GET)),
content_type="text/plain") | [
"Handles CAS 2.0+ XML-based proxy callback call.\n Stores the proxy granting ticket in the database for\n future use.\n\n NB: Use created and set it in python in case database\n has issues with setting up the default timestamp value\n "
]
|
Please provide a description of the function:def process_view(self, request, view_func, view_args, view_kwargs):
if view_func == login:
return cas_login(request, *view_args, **view_kwargs)
elif view_func == logout:
return cas_logout(request, *view_args, **view_kwargs)
if settings.CAS_ADMIN_PREFIX:
if not request.path.startswith(settings.CAS_ADMIN_PREFIX):
return None
elif not view_func.__module__.startswith('django.contrib.admin.'):
return None
try:
# use callable for pre-django 2.0
is_authenticated = request.user.is_authenticated()
except TypeError:
is_authenticated = request.user.is_authenticated
if is_authenticated:
if request.user.is_staff:
return None
else:
error = ('<h1>Forbidden</h1><p>You do not have staff '
'privileges.</p>')
return HttpResponseForbidden(error)
params = urlencode({REDIRECT_FIELD_NAME: request.get_full_path()})
return HttpResponseRedirect(reverse(cas_login) + '?' + params) | [
"\n Forwards unauthenticated requests to the admin page to the CAS\n login URL, as well as calls to django.contrib.auth.views.login and\n logout.\n "
]
|
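A hedged settings sketch for wiring the middleware into a Django project (setting names follow the ones referenced throughout this module; the dotted import paths are assumptions):

    # settings.py (illustrative)
    CAS_SERVER_URL = 'https://cas.example.com/'
    CAS_ADMIN_PREFIX = '/admin/'
    CAS_LOGOUT_COMPLETELY = True

    MIDDLEWARE = [
        # ...
        'django.contrib.auth.middleware.AuthenticationMiddleware',
        'cas.middleware.CASMiddleware',  # assumed path
    ]

    AUTHENTICATION_BACKENDS = (
        'django.contrib.auth.backends.ModelBackend',
        'cas.backends.CASBackend',  # assumed path
    )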
Please provide a description of the function:def process_exception(self, request, exception):
if isinstance(exception, CasTicketException):
do_logout(request)
# This assumes that request.path requires authentication.
return HttpResponseRedirect(request.path)
else:
return None | [
"\n When we get a CasTicketException, that is probably caused by the ticket timing out.\n So logout/login and get the same page again.\n "
]
|
Please provide a description of the function:def objectify(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
try:
payload = func(*args, **kwargs)
except requests.exceptions.ConnectionError as e:
raise InternetConnectionError(e)
return EventbriteObject.create(payload)
return wrapper | [
" Converts the returned value from a models.Payload to\n a models.EventbriteObject. Used by the access methods\n of the client.Eventbrite object\n "
]
|
Please provide a description of the function:def get_category(self, id, **data):
return self.get("/categories/{0}/".format(id), data=data) | [
"\n GET /categories/:id/\n Gets a :format:`category` by ID as ``category``.\n "
]
|
Please provide a description of the function:def get_subcategory(self, id, **data):
return self.get("/subcategories/{0}/".format(id), data=data) | [
"\n GET /subcategories/:id/\n Gets a :format:`subcategory` by ID as ``subcategory``.\n "
]
|
Please provide a description of the function:def get_event(self, id, **data):
return self.get("/events/{0}/".format(id), data=data) | [
"\n GET /events/:id/\n Returns an :format:`event` for the specified event. Many of Eventbrite’s API use cases revolve around pulling details\n of a specific event within an Eventbrite account. Does not support fetching a repeating event series parent\n (see :ref:`get-series-by-id`).\n "
]
|
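A usage sketch for these access methods (token and id are placeholders; the Eventbrite client class is assumed to come from this SDK):

    eventbrite = Eventbrite('YOUR_OAUTH_TOKEN')
    event = eventbrite.get_event('1234567890')
    print(event['name']['text'])  # payload fields follow the Eventbrite API format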
Please provide a description of the function:def post_event(self, id, **data):
return self.post("/events/{0}/".format(id), data=data) | [
"\n POST /events/:id/\n Updates an event. Returns an :format:`event` for the specified event. Does not support updating a repeating event\n series parent (see POST /series/:id/).\n "
]
|
Please provide a description of the function:def post_event_publish(self, id, **data):
return self.post("/events/{0}/publish/".format(id), data=data) | [
"\n POST /events/:id/publish/\n Publishes an event if it has not already been deleted. In order for publish to be permitted, the event must have all\n necessary information, including a name and description, an organizer, at least one ticket, and valid payment options.\n This API endpoint will return argument errors for event fields that fail to validate the publish requirements. Returns\n a boolean indicating success or failure of the publish.\n field_error event.name MISSING\n Your event must have a name to be published.\n field_error event.start MISSING\n Your event must have a start date to be published.\n field_error event.end MISSING\n Your event must have an end date to be published.\n field_error event.start.timezone MISSING\n Your event start and end dates must have matching time zones to be published.\n field_error event.organizer MISSING\n Your event must have an organizer to be published.\n field_error event.currency MISSING\n Your event must have a currency to be published.\n field_error event.currency INVALID\n Your event must have a valid currency to be published.\n field_error event.tickets MISSING\n Your event must have at least one ticket to be published.\n field_error event.tickets.N.name MISSING\n All tickets must have names in order for your event to be published. The N will be the ticket class ID with the\n error.\n field_error event.tickets.N.quantity_total MISSING\n All non-donation tickets must have an available quantity value in order for your event to be published. The N\n will be the ticket class ID with the error.\n field_error event.tickets.N.cost MISSING\n All non-donation tickets must have a cost (which can be ``0.00`` for free tickets) in order for your event to\n be published. The N will be the ticket class ID with the error.\n "
]
|