text (string, lengths 4 to 1.02M) | meta (dict)
---|---
from regtemplate.regtemplate import RegTemplate
template = RegTemplate()
template.parse_template(r"""
Displaying numbers from index {{ index_name|word }}
Number of interesting events: {{ num_events|digits }}
Number of pages: {{ num_pages|digits }}
""")
matches = template.match(r"""
Displaying numbers from index SuperIndex
Number of interesting events: 45678
Number of pages: 9876
""")
print matches | {
"content_hash": "826dc1c41addc1e94ce61cbae129f10c",
"timestamp": "",
"source": "github",
"line_count": 17,
"max_line_length": 53,
"avg_line_length": 23.764705882352942,
"alnum_prop": 0.7574257425742574,
"repo_name": "magnusjt/regtemplate",
"id": "139da57e2451a5b6784f958d5bf47718a0058a42",
"size": "404",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/ex1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "4342"
},
{
"name": "Shell",
"bytes": "200"
}
],
"symlink_target": ""
} |
"""
Tests for template generation.
"""
from unittest import TestCase
from fabric.api import env
from fabric.state import commands
from os.path import dirname, join
from nose.tools import eq_, ok_
from mock import patch, Mock
from confab.autotasks import generate_tasks
class TestAutoTasks(TestCase):
def setUp(self):
# create tasks
self.settings = generate_tasks(join(dirname(__file__), "data/autotasks"))
def tearDown(self):
if "environmentdef" in env:
del env["environmentdef"]
def test_environment_autotask(self):
"""
Calling generate_tasks() creates a task that loads each environment.
"""
for environment in self.settings.environmentdefs.iterkeys():
task = commands[environment]
ok_("environmentdef" not in env)
task()
ok_("environmentdef" in env)
environmentdef = env.environmentdef
eq_(environment, environmentdef.name)
del env["environmentdef"]
def test_idempotency(self):
"""
Environment tasks are idempotent (as long as no other environment is loaded).
"""
task = commands["local"]
task()
task()
def test_mismatch(self):
"""
Environment tasks for different environments cannot be used together.
"""
error = Mock()
error.side_effect = Exception("abort")
with patch("confab.autotasks.abort", error):
local = commands["local"]
local()
other = commands["other"]
with self.assertRaises(Exception) as capture:
other()
eq_("abort", capture.exception.message)
| {
"content_hash": "3f6557ecd1b59b2d116494550d326326",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 84,
"avg_line_length": 28.6,
"alnum_prop": 0.6048951048951049,
"repo_name": "locationlabs/confab",
"id": "01e9743b70a2090aee3fe1069d682d0aab19f66f",
"size": "1716",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "confab/tests/test_autotasks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "119227"
}
],
"symlink_target": ""
} |
import libtcodpy as libtcod
from vec2d import Vec2d
from model.action import Action, ActionTag
from model.attribute import AttributeTag
from behavior import Behavior
class AIRandomWalkBehavior(Behavior):
def __init__(self, manager):
Behavior.__init__(self, manager)
def generate_actions(self):
events = []
for id,entity in filter(lambda ent: ent[1].get_attribute(AttributeTag.HostileProgram), self.manager.entities.iteritems()):
# TODO: move the RNG into the entity manager so its state can be saved and seeded (roguelike reproducibility)
new_position = Vec2d(libtcod.random_get_int(0, -1, 1), libtcod.random_get_int(0, -1, 1))
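# random_get_int(0, min, max) draws from libtcod's default RNG stream (0),
# so each component of new_position is an integer in {-1, 0, 1}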
# mildly biases horizontal movement: any step with a horizontal component is flattened to purely horizontal
if new_position[0] != 0:
new_position[1] = 0
events.append(Action(ActionTag.ProgramMovement, {'target_id':entity.id, 'value':new_position}))
return events | {
"content_hash": "3cb7bb9b803667d10b69f9c462b7c7b6",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 133,
"avg_line_length": 37.95652173913044,
"alnum_prop": 0.7457044673539519,
"repo_name": "RCIX/RogueP",
"id": "ad4b522725db2d36195bb91945588c0b4477f278",
"size": "873",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "model/behaviors/behavior_ai_randomwalk.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "139221"
}
],
"symlink_target": ""
} |
"""
Contains classes and functions for working with bibliographic information.
Currently there are three such classes:
* :class:`Article` - For articles in a journal, magazine, or other periodical
* :class:`Book` - For complete books
* :class:`Thesis` - For a graduate thesis
The above are all derived from the base :class:`Reference` class, which can
also be used if the reference does not fit into any of the above categories.
"""
import re
################################################################################
class Reference:
"""
A base class for representing bibliographic information. The attributes are:
=================== ========================================================
Attribute Description
=================== ========================================================
`authors` A list of the authors of the reference
`title` The title of the reference
`year` The year the reference was published (as a string)
`doi` A DOI link to the reference
`url` Any other link to the reference
=================== ========================================================
"""
def __init__(self, authors=None, title='', year='', doi='', url=''):
self.authors = authors or []
self.title = title
self.year = year
self.doi = doi
self.url = url
def __repr__(self):
"""
Return a string representation of the reference that can be used to
reconstruct the object.
"""
string = self.toPrettyRepr()
string = re.sub(r'\(\n ', '(', string)
string = re.sub(r',\n ', ', ', string)
string = re.sub(r',\n\)', ')', string)
string = re.sub(r' = ', '=', string)
return string
def __str__(self):
"""
Return a string representation of the reference in reStructuredText
format.
"""
string = self.getAuthorString()
if self.title != '':
string += u' *{0}*'.format(self.title)
if self.year != '':
string += u' ({0})'.format(self.year)
if string and string[-1] != '.': string += '.'
return string
def toPrettyRepr(self):
"""
Return a string representation of the reference that can be used to
reconstruct the object.
"""
string = u'Reference(\n'
if len(self.authors) != 0: string += u' authors = [{0}],\n'.format(', '.join(['"{0}"'.format(author) for author in self.authors]))
if self.title != '': string += u' title = {0!r},\n'.format(self.title)
if self.year != '': string += u' year = "{0}",\n'.format(self.year)
if self.doi != '': string += u' doi = "{0}",\n'.format(self.doi)
if self.url != '': string += u' url = "{0}",\n'.format(self.url)
return string + u')'
def getAuthorString(self):
"""
Return a pretty, reStructuredText-formatted string of the authors.
"""
authors = ''
if self.authors is not None and len(self.authors) > 0:
if len(self.authors) == 1:
authors = u'{0}.'.format(self.authors[0])
elif len(self.authors) == 2:
authors = u'{0} and {1}.'.format(self.authors[0], self.authors[1])
elif self.authors[-1] == 'et al':
authors = u'{0} et al.'.format(', '.join(self.authors[:-1]))
else:
authors = u'{0}, and {1}.'.format(', '.join(self.authors[:-1]), self.authors[-1])
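# e.g. ['Smith, A.', 'Jones, B.', 'Lee, C.'] gives u'Smith, A., Jones, B., and Lee, C.'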
# reStructuredText automatically interprets "A." et al as a
# numbered list; this suppresses that behavior
if authors[1:3] == '. ':
authors = authors[0:2] + u'\ ' + authors[2:]
# If the last author is of the form "Lastname, A. B.", this will
# remove the extra period at the end of the sentence
if authors[-2:] == '..':
authors = authors[:-1]
return authors
################################################################################
class Article(Reference):
"""
A class for representing an article in a journal, magazine, or other
periodical. The attributes are:
=================== ========================================================
Attribute Description
=================== ========================================================
`authors` A list of the authors of the reference
`title` The title of the reference
`journal` The abbreviated name of the journal
`volume` The volume that the article appears in (as a string)
`number` The number that the article appears in (as a string)
`pages` The range of pages of the article (as a string)
`year` The year the reference was published (as a string)
`doi` A DOI link to the reference
`url` Any other link to the reference
=================== ========================================================
"""
def __init__(self, authors=None, title='', journal='', volume='', number='', pages='', year='', doi='', url=''):
Reference.__init__(self, authors=authors, title=title, year=year, doi=doi, url=url)
self.journal = journal
self.volume = volume
self.number = number
self.pages = pages
def __str__(self):
"""
Return a string representation of the reference in reStructuredText
format.
"""
string = self.getAuthorString()
if self.title != '':
string += u' "{0}."'.format(self.title)
if self.journal != '':
string += u' *{0}*'.format(self.journal)
if self.volume != '':
string += u' **{0}**'.format(self.volume)
if self.number != '':
string += u' ({0})'.format(self.number)
if self.pages != '':
string += u', p. {0}'.format(self.pages)
if self.year != '':
string += u' ({0})'.format(self.year)
if string and string[-1] != '.': string += u'.'
return string
def toPrettyRepr(self):
"""
Return a string representation of the reference that can be used to
reconstruct the object.
"""
string = u'Article(\n'
if len(self.authors) != 0: string += u' authors = [{0}],\n'.format(', '.join(['"{0}"'.format(author) for author in self.authors]))
if self.title != '': string += u' title = {0!r},\n'.format(self.title)
if self.journal != '': string += u' journal = "{0}",\n'.format(self.journal)
if self.volume != '': string += u' volume = "{0}",\n'.format(self.volume)
if self.number != '': string += u' number = "{0}",\n'.format(self.number)
if self.pages != '': string += u' pages = """{0}""",\n'.format(self.pages)
if self.year != '': string += u' year = "{0}",\n'.format(self.year)
if self.doi != '': string += u' doi = "{0}",\n'.format(self.doi)
if self.url != '': string += u' url = "{0}",\n'.format(self.url)
return string + u')'
################################################################################
class Book(Reference):
"""
A class for representing a complete book. The attributes are:
=================== ========================================================
Attribute Description
=================== ========================================================
`authors` A list of the authors of the reference
`title` The title of the reference
`publisher` The publisher of the book
`address` The address of the publisher (usually city and state/country)
`volume` The volume of the book
`series` The series the book belongs to
`edition` The edition of the book, as a string ordinal (e.g. ``'First'``)
`year` The year the reference was published (as a string)
`doi` A DOI link to the reference
`url` Any other link to the reference
=================== ========================================================
"""
def __init__(self, authors=None, title='', publisher='', address='', volume='', series='', edition='', year='', doi='', url=''):
Reference.__init__(self, authors=authors, title=title, year=year, doi=doi, url=url)
self.publisher = publisher
self.address = address
self.volume = volume
self.series = series
self.edition = edition
def __str__(self):
"""
Return a string representation of the reference in reStructuredText
format.
"""
string = self.getAuthorString()
if self.title != '':
string += u' *{0}.*'.format(self.title)
if self.edition != '':
string += u' {0} edition.'.format(self.edition)
if self.volume != '':
string += u' Vol. {0}.'.format(self.volume)
if self.address != '':
string += u' {0}:'.format(self.address)
if self.publisher != '':
string += u' **{0}**'.format(self.publisher)
if self.year != '':
string += u' ({0})'.format(self.year)
if string and string[-1] != '.': string += u'.'
return string
def toPrettyRepr(self):
"""
Return a string representation of the reference that can be used to
reconstruct the object.
"""
string = u'Book(\n'
if len(self.authors) != 0: string += u' authors = [{0}],\n'.format(', '.join(['"{0}"'.format(author) for author in self.authors]))
if self.title != '': string += u' title = {0!r},\n'.format(self.title)
if self.publisher != '': string += u' publisher = "{0}",\n'.format(self.publisher)
if self.address != '': string += u' address = "{0}",\n'.format(self.address)
if self.volume != '': string += u' volume = "{0}",\n'.format(self.volume)
if self.series != '': string += u' series = """{0}""",\n'.format(self.series)
if self.edition != '': string += u' edition = """{0}""",\n'.format(self.edition)
if self.year != '': string += u' year = "{0}",\n'.format(self.year)
if self.doi != '': string += u' doi = "{0}",\n'.format(self.doi)
if self.url != '': string += u' url = "{0}",\n'.format(self.url)
return string + u')'
################################################################################
class Thesis(Reference):
"""
A class for representing a graduate thesis. The attributes are:
=================== ========================================================
Attribute Description
=================== ========================================================
`authors` A list of the authors of the reference
`title` The title of the reference
`degree` ``'Ph.D.'`` or ``'Masters'``
`school` The name of the institution at which the thesis was written
`year` The year the reference was published (as a string)
`doi` A DOI link to the reference
`url` Any other link to the reference
=================== ========================================================
"""
def __init__(self, authors=None, title='', degree='', school='', year='', doi='', url=''):
Reference.__init__(self, authors=authors, title=title, year=year, doi=doi, url=url)
self.degree = degree
self.school = school
def __str__(self):
"""
Return a string representation of the reference in reStructuredText
format.
"""
string = self.getAuthorString()
if self.title != '':
string += u' "{0}."'.format(self.title)
if self.degree != '':
string += u' {0} thesis.'.format(self.degree)
if self.school != '':
string += u' {0}'.format(self.school)
if self.year != '':
string += u' ({0})'.format(self.year)
if string and string[-1] != '.': string += u'.'
return string
def toPrettyRepr(self):
"""
Return a string representation of the reference that can be used to
reconstruct the object.
"""
string = u'Thesis(\n'
if len(self.authors) != 0: string += u' authors = [{0}],\n'.format(', '.join(['"{0}"'.format(author) for author in self.authors]))
if self.title != '': string += u' title = {0!r},\n'.format(self.title)
if self.degree != '': string += u' degree = "{0}",\n'.format(self.degree)
if self.school != '': string += u' school = "{0}",\n'.format(self.school)
if self.year != '': string += u' year = "{0}",\n'.format(self.year)
if self.doi != '': string += u' doi = "{0}",\n'.format(self.doi)
if self.url != '': string += u' url = "{0}",\n'.format(self.url)
return string + u')'
################################################################################
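# Example usage: a minimal sketch based on the classes above. The commented
# output follows directly from Article.__str__ as defined in this module.
if __name__ == '__main__':
    article = Article(
        authors=['Smith, A. B.'],
        title='An example article',
        journal='J. Ex.',
        volume='12',
        pages='34-56',
        year='2005',
    )
    # Prints: Smith, A. B. "An example article." *J. Ex.* **12**, p. 34-56 (2005).
    print(article)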
| {
"content_hash": "c4b783cd35dcedda704377ba92e70674",
"timestamp": "",
"source": "github",
"line_count": 298,
"max_line_length": 141,
"avg_line_length": 45.09731543624161,
"alnum_prop": 0.4694545725128358,
"repo_name": "nyee/RMG-Py",
"id": "72b08684f30dfa98c742ed3582bdcdc30694fcf1",
"size": "14885",
"binary": false,
"copies": "11",
"ref": "refs/heads/master",
"path": "rmgpy/data/reference.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "461"
},
{
"name": "Jupyter Notebook",
"bytes": "17473"
},
{
"name": "Makefile",
"bytes": "5832"
},
{
"name": "Python",
"bytes": "3406678"
},
{
"name": "Shell",
"bytes": "2733"
}
],
"symlink_target": ""
} |
from __future__ import division
import os
import platform
import re
from xml.etree import ElementTree as ET
import psutil
from six import iteritems, string_types
from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative
from datadog_checks.base.utils.platform import Platform
from datadog_checks.base.utils.subprocess_output import SubprocessOutputEmptyError, get_subprocess_output
from datadog_checks.base.utils.timeout import TimeoutException, timeout
if platform.system() == 'Windows':
import win32wnet
# See: https://github.com/DataDog/integrations-core/pull/1109#discussion_r167133580
IGNORE_CASE = re.I
def _base_device_name(device):
return device.strip('\\').lower()
else:
IGNORE_CASE = 0
def _base_device_name(device):
return os.path.basename(device)
class Disk(AgentCheck):
"""Collects metrics about the machine's disks."""
METRIC_DISK = 'system.disk.{}'
METRIC_INODE = 'system.fs.inodes.{}'
def __init__(self, name, init_config, instances):
if instances is not None and len(instances) > 1:
raise ConfigurationError('Disk check only supports one configured instance.')
super(Disk, self).__init__(name, init_config, instances)
instance = instances[0]
self._use_mount = is_affirmative(instance.get('use_mount', False))
self._all_partitions = is_affirmative(instance.get('all_partitions', False))
self._file_system_include = instance.get('file_system_include', []) or instance.get('file_system_whitelist', [])
self._file_system_exclude = instance.get('file_system_exclude', []) or instance.get('file_system_blacklist', [])
# FIXME (8.X): Exclude special file systems by default
self._include_all_devices = is_affirmative(instance.get('include_all_devices', True))
self._device_include = instance.get('device_include', []) or instance.get('device_whitelist', [])
self._device_exclude = instance.get('device_exclude', []) or instance.get('device_blacklist', [])
self._mount_point_include = instance.get('mount_point_include', []) or instance.get('mount_point_whitelist', [])
self._mount_point_exclude = instance.get('mount_point_exclude', []) or instance.get('mount_point_blacklist', [])
self._tag_by_filesystem = is_affirmative(instance.get('tag_by_filesystem', False))
self._tag_by_label = is_affirmative(instance.get('tag_by_label', True))
self._device_tag_re = instance.get('device_tag_re', {})
self._custom_tags = instance.get('tags', [])
self._service_check_rw = is_affirmative(instance.get('service_check_rw', False))
self._min_disk_size = instance.get('min_disk_size', 0) * 1024 * 1024
self._blkid_cache_file = instance.get('blkid_cache_file')
self._use_lsblk = is_affirmative(instance.get('use_lsblk', False))
self._timeout = instance.get('timeout', 5)
self._compile_pattern_filters(instance)
self._compile_tag_re()
self._blkid_label_re = re.compile('LABEL=\"(.*?)\"', re.I)
if self._use_lsblk and self._blkid_cache_file:
raise ConfigurationError("Only one of 'use_lsblk' and 'blkid_cache_file' can be set at the same time.")
if platform.system() == 'Windows':
self._manual_mounts = instance.get('create_mounts', [])
self._create_manual_mounts()
deprecations_init_conf = {
'file_system_global_blacklist': 'file_system_global_exclude',
'device_global_blacklist': 'device_global_exclude',
'mount_point_global_blacklist': 'mount_point_global_exclude',
}
for old_name, new_name in deprecations_init_conf.items():
if init_config.get(old_name):
self.warning(
'`%s` is deprecated and will be removed in a future release. Please use `%s` instead.',
old_name,
new_name,
)
deprecations_instance = {
'file_system_whitelist': 'file_system_include',
'file_system_blacklist': 'file_system_exclude',
'device_whitelist': 'device_include',
'device_blacklist': 'device_exclude',
'mount_point_whitelist': 'mount_point_include',
'mount_point_blacklist': 'mount_point_exclude',
'excluded_filesystems': 'file_system_exclude',
'excluded_disks': 'device_exclude',
'excluded_disk_re': 'device_exclude',
'excluded_mountpoint_re': 'mount_point_exclude',
}
for old_name, new_name in deprecations_instance.items():
if instance.get(old_name):
self.warning(
'`%s` is deprecated and will be removed in a future release. Please use `%s` instead.',
old_name,
new_name,
)
self.devices_label = {}
def check(self, _):
"""Get disk space/inode stats"""
if self._tag_by_label and Platform.is_linux():
self.devices_label = self._get_devices_label()
for part in psutil.disk_partitions(all=self._include_all_devices):
# we check all exclude conditions
if self.exclude_disk(part):
self.log.debug('Excluding device %s', part.device)
continue
# Get disk metrics here to be able to exclude on total usage
try:
disk_usage = timeout(self._timeout)(psutil.disk_usage)(part.mountpoint)
except TimeoutException:
self.log.warning(
u'Timeout after %d seconds while retrieving the disk usage of `%s` mountpoint. '
u'You might want to change the timeout length in the settings.',
self._timeout,
part.mountpoint,
)
continue
except Exception as e:
self.log.warning(
u'Unable to get disk metrics for %s: %s. '
u'You can exclude this mountpoint in the settings if it is invalid.',
part.mountpoint,
e,
)
continue
# Exclude disks with size less than min_disk_size
if disk_usage.total <= self._min_disk_size:
if disk_usage.total > 0:
self.log.info('Excluding device %s with total disk size %s', part.device, disk_usage.total)
continue
self.log.debug('Passed: %s', part.device)
tags = self._get_tags(part)
for metric_name, metric_value in iteritems(self._collect_part_metrics(part, disk_usage)):
self.gauge(metric_name, metric_value, tags=tags)
# Add in a disk read write or read only check
if self._service_check_rw:
rwro = {'rw', 'ro'} & set(part.opts.split(','))
if len(rwro) == 1:
self.service_check(
'disk.read_write', AgentCheck.OK if rwro.pop() == 'rw' else AgentCheck.CRITICAL, tags=tags
)
else:
self.service_check('disk.read_write', AgentCheck.UNKNOWN, tags=tags)
self.collect_latency_metrics()
def _get_tags(self, part):
device_name = part.mountpoint if self._use_mount else part.device
tags = [part.fstype, 'filesystem:{}'.format(part.fstype)] if self._tag_by_filesystem else []
tags.extend(self._custom_tags)
# apply device-specific tags
device_specific_tags = self._get_device_specific_tags(device_name)
tags.extend(device_specific_tags)
# apply device labels as tags (from blkid or lsblk).
# we want to use the real device name and not the device_name (which can be the mountpoint)
if self.devices_label.get(part.device):
tags.extend(self.devices_label.get(part.device))
# legacy check names c: vs psutil name C:\\
if Platform.is_win32():
device_name = device_name.strip('\\').lower()
tags.append('device:{}'.format(device_name))
tags.append('device_name:{}'.format(_base_device_name(part.device)))
return tags
def exclude_disk(self, part):
# Skip CD-ROM drives with no disk in them; they may raise ENOENT,
# pop up a Windows GUI error for a non-ready partition, or just hang.
# Also skip all other excluded disks.
skip_win = Platform.is_win32() and ('cdrom' in part.opts or part.fstype == '')
return skip_win or self._exclude_disk(part.device, part.fstype, part.mountpoint)
def _exclude_disk(self, device, file_system, mount_point):
"""
Return True for disks we don't want or that match a regex in the config file.
"""
if not device or device == 'none':
device = None
# Allow no device if `all_partitions` is true so we can evaluate mount points
if not self._all_partitions:
return True
# Hack for NFS secure mounts
# Secure mounts might look like this: '/mypath (deleted)', we should
# ignore all the bits not part of the mount point name. Take also into
# account a space might be in the mount point.
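# e.g. '/my mount (deleted)' -> '/my mount'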
mount_point = mount_point.rsplit(' ', 1)[0]
return self._partition_excluded(device, file_system, mount_point) or not self._partition_included(
device, file_system, mount_point
)
def _partition_included(self, device, file_system, mount_point):
return (
self._file_system_included(file_system)
and self._device_included(device)
and self._mount_point_included(mount_point)
)
def _partition_excluded(self, device, file_system, mount_point):
return (
self._file_system_excluded(file_system)
or self._device_excluded(device)
or self._mount_point_excluded(mount_point)
)
def _file_system_included(self, file_system):
if self._file_system_include is None:
return True
return bool(self._file_system_include.match(file_system))
def _file_system_excluded(self, file_system):
if self._file_system_exclude is None:
return False
return bool(self._file_system_exclude.match(file_system))
def _device_included(self, device):
if not device or self._device_include is None:
return True
return bool(self._device_include.match(device))
def _device_excluded(self, device):
if not device or self._device_exclude is None:
return False
return bool(self._device_exclude.match(device))
def _mount_point_included(self, mount_point):
if self._mount_point_include is None:
return True
return bool(self._mount_point_include.match(mount_point))
def _mount_point_excluded(self, mount_point):
if self._mount_point_exclude is None:
return False
return bool(self._mount_point_exclude.match(mount_point))
def _collect_part_metrics(self, part, usage):
metrics = {}
for name in ['total', 'used', 'free']:
# For legacy reasons, the standard unit is kB
metrics[self.METRIC_DISK.format(name)] = getattr(usage, name) / 1024
# FIXME: 8.x, use percent, a lot more logical than in_use
metrics[self.METRIC_DISK.format('in_use')] = usage.percent / 100
if Platform.is_unix():
metrics.update(self._collect_inodes_metrics(part.mountpoint))
return metrics
def _collect_inodes_metrics(self, mountpoint):
metrics = {}
# we need to timeout this, too.
try:
inodes = timeout(self._timeout)(os.statvfs)(mountpoint)
except TimeoutException:
self.log.warning(
u'Timeout after %d seconds while retrieving the disk usage of `%s` mountpoint. '
u'You might want to change the timeout length in the settings.',
self._timeout,
mountpoint,
)
return metrics
except Exception as e:
self.log.warning(
u'Unable to get disk metrics for %s: %s. '
u'You can exclude this mountpoint in the settings if it is invalid.',
mountpoint,
e,
)
return metrics
if inodes.f_files != 0:
total = inodes.f_files
free = inodes.f_ffree
metrics[self.METRIC_INODE.format('total')] = total
metrics[self.METRIC_INODE.format('free')] = free
metrics[self.METRIC_INODE.format('used')] = total - free
# FIXME: 8.x, use percent, a lot more logical than in_use
metrics[self.METRIC_INODE.format('in_use')] = (total - free) / total
return metrics
def collect_latency_metrics(self):
for disk_name, disk in iteritems(psutil.disk_io_counters(True)):
self.log.debug('IO Counters: %s -> %s', disk_name, disk)
try:
metric_tags = [] if self._custom_tags is None else self._custom_tags[:]
device_specific_tags = self._get_device_specific_tags(disk_name)
metric_tags.extend(device_specific_tags)
metric_tags.append('device:{}'.format(disk_name))
metric_tags.append('device_name:{}'.format(_base_device_name(disk_name)))
if self.devices_label.get(disk_name):
metric_tags.extend(self.devices_label.get(disk_name))
self.monotonic_count(self.METRIC_DISK.format('read_time'), disk.read_time, tags=metric_tags)
self.monotonic_count(self.METRIC_DISK.format('write_time'), disk.write_time, tags=metric_tags)
# FIXME: 8.x, metrics kept for backwards compatibility but are incorrect: the value is not a percentage
# See: https://github.com/DataDog/integrations-core/pull/7323#issuecomment-756427024
self.rate(self.METRIC_DISK.format('read_time_pct'), disk.read_time * 100 / 1000, tags=metric_tags)
self.rate(self.METRIC_DISK.format('write_time_pct'), disk.write_time * 100 / 1000, tags=metric_tags)
except AttributeError as e:
# Some OS don't return read_time/write_time fields
# http://psutil.readthedocs.io/en/latest/#psutil.disk_io_counters
self.log.debug('Latency metrics not collected for %s: %s', disk_name, e)
def _compile_pattern_filters(self, instance):
file_system_exclude_extras = self.init_config.get(
'file_system_global_exclude',
self.init_config.get('file_system_global_blacklist', self.get_default_file_system_exclude()),
)
device_exclude_extras = self.init_config.get(
'device_global_exclude', self.init_config.get('device_global_blacklist', self.get_default_device_exclude())
)
mount_point_exclude_extras = self.init_config.get(
'mount_point_global_exclude',
self.init_config.get('mount_point_global_blacklist', self.get_default_mount_point_exclude()),
)
if 'excluded_filesystems' in instance:
file_system_exclude_extras.extend(
'{}$'.format(pattern) for pattern in instance['excluded_filesystems'] if pattern
)
if 'excluded_disks' in instance:
device_exclude_extras.extend('{}$'.format(pattern) for pattern in instance['excluded_disks'] if pattern)
if 'excluded_disk_re' in instance:
device_exclude_extras.append(instance['excluded_disk_re'])
if 'excluded_mountpoint_re' in instance:
mount_point_exclude_extras.append(instance['excluded_mountpoint_re'])
# Any without valid patterns will become None
self._file_system_include = self._compile_valid_patterns(self._file_system_include, casing=re.I)
self._file_system_exclude = self._compile_valid_patterns(
self._file_system_exclude, casing=re.I, extra_patterns=file_system_exclude_extras
)
self._device_include = self._compile_valid_patterns(self._device_include)
self._device_exclude = self._compile_valid_patterns(self._device_exclude, extra_patterns=device_exclude_extras)
self._mount_point_include = self._compile_valid_patterns(self._mount_point_include)
self._mount_point_exclude = self._compile_valid_patterns(
self._mount_point_exclude, extra_patterns=mount_point_exclude_extras
)
def _compile_valid_patterns(self, patterns, casing=IGNORE_CASE, extra_patterns=None):
valid_patterns = []
if isinstance(patterns, string_types):
patterns = [patterns]
else:
patterns = list(patterns)
if extra_patterns:
for extra_pattern in extra_patterns:
if extra_pattern not in patterns:
patterns.append(extra_pattern)
for pattern in patterns:
# Ignore empty patterns as they match everything
if not pattern:
continue
try:
re.compile(pattern, casing)
except Exception:
self.log.warning('%s is not a valid regular expression and will be ignored', pattern)
else:
valid_patterns.append(pattern)
if valid_patterns:
return re.compile('|'.join(valid_patterns), casing)
def _compile_tag_re(self):
"""
Compile regex strings from device_tag_re option and return list of compiled regex/tag pairs
"""
device_tag_list = []
for regex_str, tags in iteritems(self._device_tag_re):
try:
device_tag_list.append([re.compile(regex_str, IGNORE_CASE), [t.strip() for t in tags.split(',')]])
except TypeError:
self.log.warning('%s is not a valid regular expression and will be ignored', regex_str)
self._device_tag_re = device_tag_list
def _get_devices_label(self):
"""
Get every label to create tags; returns a map of device name to label:value tags.
"""
if self._use_lsblk:
return self._get_devices_label_from_lsblk()
elif not self._blkid_cache_file:
return self._get_devices_label_from_blkid()
return self._get_devices_label_from_blkid_cache()
def _get_devices_label_from_lsblk(self):
"""
Get device labels using the `lsblk` command. Returns a map of device name to label:value
"""
devices_labels = dict()
try:
# Use raw output mode (space-separated fields encoded in UTF-8).
# We want to be compatible with lsblk version 2.19 since
# it is the last version supported by CentOS 6 and SUSE 11.
lsblk_out, _, _ = get_subprocess_output(["lsblk", "--noheadings", "--raw", "--output=NAME,LABEL"], self.log)
for line in lsblk_out.splitlines():
device, _, label = line.partition(' ')
if label:
# Line sample (device "/dev/sda1" with label " MY LABEL")
# sda1 MY LABEL
devices_labels["/dev/" + device] = ['label:{}'.format(label), 'device_label:{}'.format(label)]
except SubprocessOutputEmptyError:
self.log.debug("Couldn't use lsblk to have device labels")
return devices_labels
def _get_devices_label_from_blkid(self):
devices_label = {}
try:
blkid_out, _, _ = get_subprocess_output(['blkid'], self.log)
all_devices = [l.split(':', 1) for l in blkid_out.splitlines()]
for d in all_devices:
# Line sample
# /dev/sda1: LABEL="MYLABEL" UUID="5eea373d-db36-4ce2-8c71-12ce544e8559" TYPE="ext4"
labels = self._blkid_label_re.findall(d[1])
if labels:
devices_label[d[0]] = ['label:{}'.format(labels[0]), 'device_label:{}'.format(labels[0])]
except SubprocessOutputEmptyError:
self.log.debug("Couldn't use blkid to have device labels")
return devices_label
def _get_devices_label_from_blkid_cache(self):
devices_label = {}
try:
with open(self._blkid_cache_file, 'r') as blkid_cache_file_handler:
blkid_cache_data = blkid_cache_file_handler.readlines()
except IOError as e:
self.log.warning("Couldn't read the blkid cache file %s: %s", self._blkid_cache_file, e)
return devices_label
# Line sample
# <device DEVNO="0x0801" LABEL="MYLABEL" UUID="..." TYPE="ext4">/dev/sda1</device>
for line in blkid_cache_data:
try:
root = ET.fromstring(line)
device = root.text
label = root.attrib.get('LABEL')
if label and device:
devices_label[device] = ['label:{}'.format(label), 'device_label:{}'.format(label)]
except ET.ParseError as e:
self.log.warning(
'Failed to parse line %s because of %s - skipping the line (some labels might be missing)', line, e
)
return devices_label
def _get_device_specific_tags(self, device_name):
device_specific_tags = []
# apply device/mountpoint specific tags
for regex, device_tags in self._device_tag_re:
if regex.match(device_name):
device_specific_tags.extend(device_tags)
return device_specific_tags
def _create_manual_mounts(self):
"""
On Windows, in order to collect statistics on remote (SMB/NFS) drives, the drive must be mounted
as the agent user in the agent context; otherwise the agent can't 'see' the drive. If so configured,
attempt to mount the desired drives.
"""
if not self._manual_mounts:
self.log.debug("No manual mounts")
else:
self.log.debug("Attempting to create %d mounts: ", len(self._manual_mounts))
for manual_mount in self._manual_mounts:
remote_machine = manual_mount.get('host')
share = manual_mount.get('share')
uname = manual_mount.get('user')
pword = manual_mount.get('password')
mtype = manual_mount.get('type')
mountpoint = manual_mount.get('mountpoint')
nr = win32wnet.NETRESOURCE()
if not remote_machine or not share:
self.log.error("Invalid configuration. Drive mount requires remote machine and share point")
continue
if mtype and mtype.lower() == "nfs":
nr.lpRemoteName = r"{}:{}".format(remote_machine, share)
self.log.debug("Attempting NFS mount: %s", nr.lpRemoteName)
else:
nr.lpRemoteName = r"\\{}\{}".format(remote_machine, share).rstrip('\\')
self.log.debug("Attempting SMB mount: %s", nr.lpRemoteName)
nr.dwType = 0
nr.lpLocalName = mountpoint
try:
win32wnet.WNetAddConnection2(nr, pword, uname, 0)
self.log.debug("Successfully mounted %s as %s", mountpoint, nr.lpRemoteName)
except Exception as e:
self.log.error("Failed to mount %s %s", nr.lpRemoteName, str(e))
pass
@staticmethod
def get_default_file_system_exclude():
return [
# CDROM
'iso9660$',
]
@staticmethod
def get_default_device_exclude():
return []
@staticmethod
def get_default_mount_point_exclude():
return [
# https://github.com/DataDog/datadog-agent/issues/1961
# https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2018-1049
'(/host)?/proc/sys/fs/binfmt_misc$'
]
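# A minimal sketch of how this check might be driven in a test, assuming the
# AgentCheck constructor signature used above (name, init_config, instances);
# the option keys mirror those read in __init__ (not an official config):
#
#     check = Disk('disk', {}, [{
#         'use_mount': False,
#         'file_system_exclude': ['tmpfs$'],
#         'device_tag_re': {'/dev/sda.*': 'disk_type:ssd'},
#     }])
#     check.check({})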
| {
"content_hash": "b0f61c6281419541393108a747e9f981",
"timestamp": "",
"source": "github",
"line_count": 559,
"max_line_length": 120,
"avg_line_length": 43.42397137745975,
"alnum_prop": 0.5903435774903188,
"repo_name": "DataDog/integrations-core",
"id": "84729bb460d5362b79b6dfd3abd8d78391a4ef3b",
"size": "24389",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "disk/datadog_checks/disk/disk.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "578"
},
{
"name": "COBOL",
"bytes": "12312"
},
{
"name": "Dockerfile",
"bytes": "22998"
},
{
"name": "Erlang",
"bytes": "15518"
},
{
"name": "Go",
"bytes": "6988"
},
{
"name": "HCL",
"bytes": "4080"
},
{
"name": "HTML",
"bytes": "1318"
},
{
"name": "JavaScript",
"bytes": "1817"
},
{
"name": "Kotlin",
"bytes": "430"
},
{
"name": "Lua",
"bytes": "3489"
},
{
"name": "PHP",
"bytes": "20"
},
{
"name": "PowerShell",
"bytes": "2398"
},
{
"name": "Python",
"bytes": "13020828"
},
{
"name": "Roff",
"bytes": "359"
},
{
"name": "Ruby",
"bytes": "241"
},
{
"name": "Scala",
"bytes": "7000"
},
{
"name": "Shell",
"bytes": "83227"
},
{
"name": "Swift",
"bytes": "203"
},
{
"name": "TSQL",
"bytes": "29972"
},
{
"name": "TypeScript",
"bytes": "1019"
}
],
"symlink_target": ""
} |
from pygw.base.java_transformer import JavaTransformer
from pygw.base.type_conversions import PrimitiveByteArrayType
class BinnedStatisticTransformer(JavaTransformer):
"""
Transforms a binned statistic value into a tuple.
"""
def __init__(self, statistic_value_transformer):
self._statistic_value_transformer = statistic_value_transformer
self._byte_array_type = PrimitiveByteArrayType()
super().__init__()
def transform(self, j_object):
"""
Transform the given bin/value pair into a tuple.
Args:
j_object (Java Pair): The bin/value pair.
Returns:
A tuple of bin bytes and statistic value.
"""
return (self._byte_array_type.from_java(j_object.getKey().getBytes()),
self._statistic_value_transformer.transform(j_object.getValue()))
| {
"content_hash": "4e20af85aec926c5cc429b71c5fe17e5",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 78,
"avg_line_length": 34.52,
"alnum_prop": 0.6593279258400927,
"repo_name": "locationtech/geowave",
"id": "5da4769394d5345e641c3c0defefc777c4e73a35",
"size": "1382",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/src/main/python/pygw/statistics/transformers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "10168"
},
{
"name": "Dockerfile",
"bytes": "3268"
},
{
"name": "FreeMarker",
"bytes": "2879"
},
{
"name": "Gnuplot",
"bytes": "57750"
},
{
"name": "Java",
"bytes": "11564564"
},
{
"name": "Puppet",
"bytes": "8849"
},
{
"name": "Python",
"bytes": "418256"
},
{
"name": "Scheme",
"bytes": "20491"
},
{
"name": "Shell",
"bytes": "100172"
}
],
"symlink_target": ""
} |
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management import *
from ambari_commons.os_check import OSCheck
import ambari_simplejson as json  # simplejson is much faster than the Python 2.6 json module and has the same function set.
# components_list = repoName + postfix
_UBUNTU_REPO_COMPONENTS_POSTFIX = ["main"]
REPO_FILE_NAME_PREFIX = 'IOP-'
STACK_TO_ROOT_FOLDER = {"IOP": "/usr/iop", "BIGINSIGHTS":"/usr/iop"}
def _alter_repo(action, repo_string, repo_template):
"""
@param action: "delete" or "create"
@param repo_string: e.g. "[{\"baseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\",\"osType\":\"centos6\",\"repoId\":\"HDP-2.0._\",\"repoName\":\"HDP\",\"defaultBaseUrl\":\"http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.0.6.0\"}]"
@param repo_template: template to use for generating the repo file
"""
repo_dicts = json.loads(repo_string)
if not isinstance(repo_dicts, list):
repo_dicts = [repo_dicts]
for repo in repo_dicts:
if 'baseUrl' not in repo:
repo['baseUrl'] = None
if 'mirrorsList' not in repo:
repo['mirrorsList'] = None
ubuntu_components = [ repo['repoName'] ] + _UBUNTU_REPO_COMPONENTS_POSTFIX
Repository(repo['repoId'],
action = action,
base_url = repo['baseUrl'],
mirror_list = repo['mirrorsList'],
repo_file_name = repo['repoName'],
repo_template = repo_template,
components = ubuntu_components, # ubuntu specific
)
def install_repos():
import params
if params.host_sys_prepped:
return
template = params.repo_rhel_suse if OSCheck.is_suse_family() or OSCheck.is_redhat_family() else params.repo_ubuntu
_alter_repo("create", params.repo_info, template)
if params.service_repo_info:
_alter_repo("create", params.service_repo_info, template)
| {
"content_hash": "85f4c047a42f9d0d1166e19545078727",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 278,
"avg_line_length": 40.53968253968254,
"alnum_prop": 0.6949882537196554,
"repo_name": "arenadata/ambari",
"id": "dffcc391c9c134407cf052d7b64ce1a33b7f3440",
"size": "2554",
"binary": false,
"copies": "2",
"ref": "refs/heads/branch-adh-1.6",
"path": "ambari-server/src/main/resources/stacks/BigInsights/4.0/hooks/before-INSTALL/scripts/repo_initialization.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "46700"
},
{
"name": "C",
"bytes": "331204"
},
{
"name": "C#",
"bytes": "215907"
},
{
"name": "C++",
"bytes": "257"
},
{
"name": "CSS",
"bytes": "343739"
},
{
"name": "CoffeeScript",
"bytes": "8465"
},
{
"name": "Dockerfile",
"bytes": "6387"
},
{
"name": "EJS",
"bytes": "777"
},
{
"name": "FreeMarker",
"bytes": "2654"
},
{
"name": "Gherkin",
"bytes": "990"
},
{
"name": "Groovy",
"bytes": "15882"
},
{
"name": "HTML",
"bytes": "717983"
},
{
"name": "Handlebars",
"bytes": "1819641"
},
{
"name": "Java",
"bytes": "29172298"
},
{
"name": "JavaScript",
"bytes": "18571926"
},
{
"name": "Jinja",
"bytes": "1490416"
},
{
"name": "Less",
"bytes": "412933"
},
{
"name": "Makefile",
"bytes": "11111"
},
{
"name": "PHP",
"bytes": "149648"
},
{
"name": "PLpgSQL",
"bytes": "287501"
},
{
"name": "PowerShell",
"bytes": "2090340"
},
{
"name": "Python",
"bytes": "18507704"
},
{
"name": "R",
"bytes": "3943"
},
{
"name": "Ruby",
"bytes": "38590"
},
{
"name": "SCSS",
"bytes": "40072"
},
{
"name": "Shell",
"bytes": "924115"
},
{
"name": "Stylus",
"bytes": "820"
},
{
"name": "TSQL",
"bytes": "42351"
},
{
"name": "Vim script",
"bytes": "5813"
},
{
"name": "sed",
"bytes": "2303"
}
],
"symlink_target": ""
} |
"""
Unit tests for robot (dependency resolution).
@author: Toon Willems (Ghent University)
"""
import os
import re
import shutil
import tempfile
from copy import deepcopy
from test.framework.utilities import EnhancedTestCase, init_config
from unittest import TestLoader
from unittest import main as unittestmain
import easybuild.framework.easyconfig.tools as ectools
import easybuild.tools.robot as robot
from easybuild.framework.easyconfig.tools import skip_available
from easybuild.tools import config, modules
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import read_file, write_file
from easybuild.tools.github import fetch_github_token
from easybuild.tools.robot import resolve_dependencies
from test.framework.utilities import find_full_path
# test account, for which a token is available
GITHUB_TEST_ACCOUNT = 'easybuild_test'
ORIG_MODULES_TOOL = modules.modules_tool
ORIG_ECTOOLS_MODULES_TOOL = ectools.modules_tool
ORIG_ROBOT_MODULES_TOOL = robot.modules_tool
ORIG_MODULE_FUNCTION = os.environ.get('module', None)
class MockModule(modules.ModulesTool):
""" MockModule class, allows for controlling what modules_tool() will return """
COMMAND = 'echo'
VERSION_OPTION = '1.0'
VERSION_REGEXP = r'(?P<version>\d\S*)'
# redirect to stderr, ignore 'echo python' ($0 and $1)
COMMAND_SHELL = ["bash", "-c", "echo $2 $3 $4 1>&2"]
avail_modules = []
def available(self, *args):
"""Dummy implementation of available."""
return self.avail_modules
def show(self, modname):
"""Dummy implementation of show, which includes full path to (available or hidden) module files."""
if modname in self.avail_modules or os.path.basename(modname).startswith('.'):
txt = ' %s:' % os.path.join('/tmp', modname)
else:
txt = 'Module %s not found' % modname
return txt
def mock_module(mod_paths=None):
"""Get mock module instance."""
return MockModule(mod_paths=mod_paths)
class RobotTest(EnhancedTestCase):
""" Testcase for the robot dependency resolution """
def setUp(self):
"""Set up test."""
super(RobotTest, self).setUp()
self.github_token = fetch_github_token(GITHUB_TEST_ACCOUNT)
def xtest_resolve_dependencies(self):
""" Test with some basic testcases (also check if he can find dependencies inside the given directory """
# replace Modules class with something we have control over
config.modules_tool = mock_module
ectools.modules_tool = mock_module
robot.modules_tool = mock_module
os.environ['module'] = "() { eval `/bin/echo $*`\n}"
base_easyconfig_dir = find_full_path(os.path.join("test", "framework", "easyconfigs"))
self.assertTrue(base_easyconfig_dir)
easyconfig = {
'spec': '_',
'full_mod_name': 'name/version',
'short_mod_name': 'name/version',
'dependencies': []
}
build_options = {
'allow_modules_tool_mismatch': True,
'robot_path': None,
'validate': False,
}
init_config(build_options=build_options)
res = resolve_dependencies([deepcopy(easyconfig)])
self.assertEqual([easyconfig], res)
easyconfig_dep = {
'ec': {
'name': 'foo',
'version': '1.2.3',
'versionsuffix': '',
'toolchain': {'name': 'dummy', 'version': 'dummy'},
},
'spec': '_',
'short_mod_name': 'foo/1.2.3',
'full_mod_name': 'foo/1.2.3',
'dependencies': [{
'name': 'gzip',
'version': '1.4',
'versionsuffix': '',
'toolchain': {'name': 'dummy', 'version': 'dummy'},
'dummy': True,
'hidden': False,
}],
'parsed': True,
}
build_options.update({'robot': True, 'robot_path': base_easyconfig_dir})
init_config(build_options=build_options)
res = resolve_dependencies([deepcopy(easyconfig_dep)])
# dependency should be found, order should be correct
self.assertEqual(len(res), 2)
self.assertEqual('gzip/1.4', res[0]['full_mod_name'])
self.assertEqual('foo/1.2.3', res[-1]['full_mod_name'])
# hidden dependencies are found too, but only retained if they're not available (or forced to be retained)
hidden_dep = {
'name': 'toy',
'version': '0.0',
'versionsuffix': '-deps',
'toolchain': {'name': 'dummy', 'version': 'dummy'},
'dummy': True,
'hidden': True,
}
easyconfig_moredeps = deepcopy(easyconfig_dep)
easyconfig_moredeps['dependencies'].append(hidden_dep)
easyconfig_moredeps['hiddendependencies'] = [hidden_dep]
# toy/.0.0-deps is available and thus should be omitted
res = resolve_dependencies([deepcopy(easyconfig_moredeps)])
self.assertEqual(len(res), 2)
full_mod_names = [ec['full_mod_name'] for ec in res]
self.assertFalse('toy/.0.0-deps' in full_mod_names)
res = resolve_dependencies([deepcopy(easyconfig_moredeps)], retain_all_deps=True)
self.assertEqual(len(res), 4) # hidden dep toy/.0.0-deps (+1) depends on (fake) ictce/4.1.13 (+1)
self.assertEqual('gzip/1.4', res[0]['full_mod_name'])
self.assertEqual('foo/1.2.3', res[-1]['full_mod_name'])
full_mod_names = [ec['full_mod_name'] for ec in res]
self.assertTrue('toy/.0.0-deps' in full_mod_names)
self.assertTrue('ictce/4.1.13' in full_mod_names)
# here we have included a dependency in the easyconfig list
easyconfig['full_mod_name'] = 'gzip/1.4'
ecs = [deepcopy(easyconfig_dep), deepcopy(easyconfig)]
build_options.update({'robot_path': None})
init_config(build_options=build_options)
res = resolve_dependencies(ecs)
# all dependencies should be resolved
self.assertEqual(0, sum(len(ec['dependencies']) for ec in res))
# this should not resolve (cannot find gzip-1.4.eb), both with and without robot enabled
ecs = [deepcopy(easyconfig_dep)]
msg = "Irresolvable dependencies encountered"
self.assertErrorRegex(EasyBuildError, msg, resolve_dependencies, ecs)
# test if dependencies of an automatically found file are also loaded
easyconfig_dep['dependencies'] = [{
'name': 'gzip',
'version': '1.4',
'versionsuffix': '',
'toolchain': {'name': 'GCC', 'version': '4.6.3'},
'dummy': True,
'hidden': False,
}]
ecs = [deepcopy(easyconfig_dep)]
build_options.update({'robot_path': base_easyconfig_dir})
init_config(build_options=build_options)
res = resolve_dependencies([deepcopy(easyconfig_dep)])
# GCC should be first (required by gzip dependency)
self.assertEqual('GCC/4.6.3', res[0]['full_mod_name'])
self.assertEqual('foo/1.2.3', res[-1]['full_mod_name'])
# make sure that only missing stuff is built, and that available modules are not rebuilt
# monkey patch MockModule to pretend that all ingredients required for goolf-1.4.10 toolchain are present
MockModule.avail_modules = [
'GCC/4.7.2',
'OpenMPI/1.6.4-GCC-4.7.2',
'OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2',
'FFTW/3.3.3-gompi-1.4.10',
'ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2',
]
easyconfig_dep['dependencies'] = [{
'name': 'goolf',
'version': '1.4.10',
'versionsuffix': '',
'toolchain': {'name': 'dummy', 'version': 'dummy'},
'dummy': True,
'hidden': False,
}]
ecs = [deepcopy(easyconfig_dep)]
res = resolve_dependencies(ecs)
# there should only be two retained builds, i.e. the software itself and the goolf toolchain as dep
self.assertEqual(len(res), 2)
# goolf should be first, the software itself second
self.assertEqual('goolf/1.4.10', res[0]['full_mod_name'])
self.assertEqual('foo/1.2.3', res[1]['full_mod_name'])
# force doesn't trigger rebuild of all deps, but listed easyconfigs for which a module is available are rebuilt
build_options.update({'force': True})
init_config(build_options=build_options)
easyconfig['full_mod_name'] = 'this/is/already/there'
MockModule.avail_modules.append('this/is/already/there')
ecs = [deepcopy(easyconfig_dep), deepcopy(easyconfig)]
res = resolve_dependencies(ecs)
# there should only be three retained builds, foo + goolf dep and the additional build (even though a module is available)
self.assertEqual(len(res), 3)
# the already-available listed build should be first, then the goolf dep, then the software itself
self.assertEqual('this/is/already/there', res[0]['full_mod_name'])
self.assertEqual('goolf/1.4.10', res[1]['full_mod_name'])
self.assertEqual('foo/1.2.3', res[2]['full_mod_name'])
# builds that are listed but already have a module available are not retained without force
build_options.update({'force': False})
init_config(build_options=build_options)
newecs = skip_available(ecs) # skip available builds since force is not enabled
res = resolve_dependencies(newecs)
self.assertEqual(len(res), 2)
self.assertEqual('goolf/1.4.10', res[0]['full_mod_name'])
self.assertEqual('foo/1.2.3', res[1]['full_mod_name'])
# with retain_all_deps enabled, all dependencies are retained
build_options.update({'retain_all_deps': True})
init_config(build_options=build_options)
ecs = [deepcopy(easyconfig_dep)]
newecs = skip_available(ecs) # skip available builds since force is not enabled
res = resolve_dependencies(newecs)
self.assertEqual(len(res), 9)
self.assertEqual('GCC/4.7.2', res[0]['full_mod_name'])
self.assertEqual('goolf/1.4.10', res[-2]['full_mod_name'])
self.assertEqual('foo/1.2.3', res[-1]['full_mod_name'])
build_options.update({'retain_all_deps': False})
init_config(build_options=build_options)
# provide even fewer goolf ingredients (no OpenBLAS/ScaLAPACK), make sure the numbers add up
MockModule.avail_modules = [
'GCC/4.7.2',
'OpenMPI/1.6.4-GCC-4.7.2',
'gompi/1.4.10',
'FFTW/3.3.3-gompi-1.4.10',
]
easyconfig_dep['dependencies'] = [{
'name': 'goolf',
'version': '1.4.10',
'versionsuffix': '',
'toolchain': {'name': 'dummy', 'version': 'dummy'},
'dummy': True,
'hidden': False,
}]
ecs = [deepcopy(easyconfig_dep)]
res = resolve_dependencies([deepcopy(easyconfig_dep)])
# there should be four retained builds: the two missing goolf ingredients, the goolf toolchain itself, and the software
self.assertEqual(len(res), 4)
# the missing ingredients should come first, then goolf, then the software itself
self.assertEqual('OpenBLAS/0.2.6-gompi-1.4.10-LAPACK-3.4.2', res[0]['full_mod_name'])
self.assertEqual('ScaLAPACK/2.0.2-gompi-1.4.10-OpenBLAS-0.2.6-LAPACK-3.4.2', res[1]['full_mod_name'])
self.assertEqual('goolf/1.4.10', res[2]['full_mod_name'])
self.assertEqual('foo/1.2.3', res[3]['full_mod_name'])
config.modules_tool = ORIG_MODULES_TOOL
ectools.modules_tool = ORIG_ECTOOLS_MODULES_TOOL
robot.modules_tool = ORIG_ROBOT_MODULES_TOOL
if ORIG_MODULE_FUNCTION is not None:
os.environ['module'] = ORIG_MODULE_FUNCTION
else:
if 'module' in os.environ:
del os.environ['module']
def test_det_easyconfig_paths(self):
"""Test det_easyconfig_paths function (without --from-pr)."""
fd, dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log')
os.close(fd)
test_ecs_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs')
test_ec = 'toy-0.0-deps.eb'
shutil.copy2(os.path.join(test_ecs_path, test_ec), self.test_prefix)
shutil.copy2(os.path.join(test_ecs_path, 'ictce-4.1.13.eb'), self.test_prefix)
self.assertFalse(os.path.exists(test_ec))
args = [
os.path.join(test_ecs_path, 'toy-0.0.eb'),
test_ec, # relative path, should be resolved via robot search path
# PR for foss/2015a, see https://github.com/hpcugent/easybuild-easyconfigs/pull/1239/files
#'--from-pr=1239',
'--dry-run',
'--debug',
'--robot',
'--robot-paths=%s' % self.test_prefix, # override $EASYBUILD_ROBOT_PATHS
'--unittest-file=%s' % self.logfile,
'--github-user=%s' % GITHUB_TEST_ACCOUNT, # a GitHub token should be available for this user
'--tmpdir=%s' % self.test_prefix,
]
outtxt = self.eb_main(args, logfile=dummylogfn, raise_error=True)
modules = [
(test_ecs_path, 'toy/0.0'), # specified easyconfigs, available at given location
(self.test_prefix, 'ictce/4.1.13'), # dependency, found in robot search path
(self.test_prefix, 'toy/0.0-deps'), # specified easyconfig, found in robot search path
]
for path_prefix, module in modules:
ec_fn = "%s.eb" % '-'.join(module.split('/'))
regex = re.compile(r"^ \* \[.\] %s.*%s \(module: %s\)$" % (path_prefix, ec_fn, module), re.M)
self.assertTrue(regex.search(outtxt), "Found pattern %s in %s" % (regex.pattern, outtxt))
def test_det_easyconfig_paths_from_pr(self):
"""Test det_easyconfig_paths function, with --from-pr enabled as well."""
if self.github_token is None:
print "Skipping test_from_pr, no GitHub token available?"
return
fd, dummylogfn = tempfile.mkstemp(prefix='easybuild-dummy', suffix='.log')
os.close(fd)
test_ecs_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'easyconfigs')
test_ec = 'toy-0.0-deps.eb'
shutil.copy2(os.path.join(test_ecs_path, test_ec), self.test_prefix)
shutil.copy2(os.path.join(test_ecs_path, 'ictce-4.1.13.eb'), self.test_prefix)
self.assertFalse(os.path.exists(test_ec))
gompi_2015a_txt = '\n'.join([
"easyblock = 'Toolchain'",
"name = 'gompi'",
"version = '2015a'",
"versionsuffix = '-test'",
"homepage = 'foo'",
"description = 'bar'",
"toolchain = {'name': 'dummy', 'version': 'dummy'}",
])
write_file(os.path.join(self.test_prefix, 'gompi-2015a-test.eb'), gompi_2015a_txt)
# put gompi-2015a.eb easyconfig in place that shouldn't be considered (paths via --from-pr have precedence)
write_file(os.path.join(self.test_prefix, 'gompi-2015a.eb'), gompi_2015a_txt)
args = [
os.path.join(test_ecs_path, 'toy-0.0.eb'),
test_ec, # relative path, should be resolved via robot search path
# PR for foss/2015a, see https://github.com/hpcugent/easybuild-easyconfigs/pull/1239/files
'--from-pr=1239',
'FFTW-3.3.4-gompi-2015a.eb',
'gompi-2015a-test.eb', # relative path, available in robot search path
'--dry-run',
'--robot',
'--robot=%s' % self.test_prefix,
'--unittest-file=%s' % self.logfile,
'--github-user=%s' % GITHUB_TEST_ACCOUNT, # a GitHub token should be available for this user
'--tmpdir=%s' % self.test_prefix,
]
outtxt = self.eb_main(args, logfile=dummylogfn, raise_error=True)
from_pr_prefix = os.path.join(self.test_prefix, '.*', 'files_pr1239')
modules = [
(test_ecs_path, 'toy/0.0'), # specified easyconfigs, available at given location
(self.test_prefix, 'ictce/4.1.13'), # dependency, found in robot search path
(self.test_prefix, 'toy/0.0-deps'), # specified easyconfig, found in robot search path
(self.test_prefix, 'gompi/2015a-test'), # specified easyconfig, found in robot search path
(from_pr_prefix, 'FFTW/3.3.4-gompi-2015a'), # part of PR easyconfigs
(from_pr_prefix, 'gompi/2015a'), # part of PR easyconfigs
(test_ecs_path, 'GCC/4.9.2'), # dependency for PR easyconfigs, found in robot search path
]
for path_prefix, module in modules:
ec_fn = "%s.eb" % '-'.join(module.split('/'))
regex = re.compile(r"^ \* \[.\] %s.*%s \(module: %s\)$" % (path_prefix, ec_fn, module), re.M)
self.assertTrue(regex.search(outtxt), "Found pattern %s in %s" % (regex.pattern, outtxt))
def suite():
""" returns all the testcases in this module """
return TestLoader().loadTestsFromTestCase(RobotTest)
if __name__ == '__main__':
unittestmain()
| {
"content_hash": "1545e0757d446630e8a0c1ad0542d5b7",
"timestamp": "",
"source": "github",
"line_count": 390,
"max_line_length": 130,
"avg_line_length": 44.228205128205126,
"alnum_prop": 0.5983535277407386,
"repo_name": "ULHPC/modules",
"id": "5898160f9cb0c3e9f59fb26c10caec42c6ff42fb",
"size": "18309",
"binary": false,
"copies": "4",
"ref": "refs/heads/devel",
"path": "easybuild/easybuild-framework/test/framework/robot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Groff",
"bytes": "36174"
},
{
"name": "Perl",
"bytes": "34780"
},
{
"name": "Python",
"bytes": "2711250"
},
{
"name": "Ruby",
"bytes": "932"
},
{
"name": "Shell",
"bytes": "51560"
}
],
"symlink_target": ""
} |
from matplotlib.path import Path
from matplotlib.patches import BoxStyle
import matplotlib.pyplot as plt
# We may derive from the matplotlib.patches.BoxStyle._Base class;
# in that case the transmute method needs to be overridden.
class MyStyle(BoxStyle._Base):
"""
A simple box.
"""
def __init__(self, pad=0.3):
"""
        The arguments need to be floating-point numbers and need to have
        default values.
*pad*
amount of padding
"""
self.pad = pad
super(MyStyle, self).__init__()
def transmute(self, x0, y0, width, height, mutation_size):
"""
Given the location and size of the box, return the path of
the box around it.
- *x0*, *y0*, *width*, *height* : location and size of the box
- *mutation_size* : a reference scale for the mutation.
Often, the *mutation_size* is the font size of the text.
You don't need to worry about the rotation as it is
automatically taken care of.
"""
# padding
pad = mutation_size * self.pad
# width and height with padding added.
        width, height = width + 2. * pad, height + 2. * pad
# boundary of the padded box
        x0, y0 = x0 - pad, y0 - pad
        x1, y1 = x0 + width, y0 + height
cp = [(x0, y0),
(x1, y0), (x1, y1), (x0, y1),
(x0-pad, (y0+y1)/2.), (x0, y0),
(x0, y0)]
com = [Path.MOVETO,
Path.LINETO, Path.LINETO, Path.LINETO,
Path.LINETO, Path.LINETO,
Path.CLOSEPOLY]
path = Path(cp, com)
return path
# register the custom style
BoxStyle._style_list["angled"] = MyStyle
plt.figure(1, figsize=(3,3))
ax = plt.subplot(111)
ax.text(0.5, 0.5, "Test", size=30, va="center", ha="center", rotation=30,
bbox=dict(boxstyle="angled,pad=0.5", alpha=0.2))
del BoxStyle._style_list["angled"]
plt.show()
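# A small sketch added for illustration (not part of the original example):
# because transmute() is pure geometry, the custom style can be exercised
# without rendering a figure by instantiating MyStyle directly.
#
#   style = MyStyle(pad=0.5)
#   path = style.transmute(0., 0., 1., 1., mutation_size=10.)
#   assert len(path.vertices) == len(path.codes)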
| {
"content_hash": "ff7be18356060bc0555ed12b9291063c",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 73,
"avg_line_length": 26.7972972972973,
"alnum_prop": 0.5572365103378719,
"repo_name": "yavalvas/yav_com",
"id": "96933cb068979fb50f0434aa2ff9e97d0005605e",
"size": "1983",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "build/matplotlib/doc/users/plotting/examples/custom_boxstyle02.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "85377"
},
{
"name": "C++",
"bytes": "568744"
},
{
"name": "CSS",
"bytes": "47585"
},
{
"name": "Erlang",
"bytes": "7112"
},
{
"name": "HTML",
"bytes": "14865"
},
{
"name": "JavaScript",
"bytes": "359937"
},
{
"name": "Objective-C",
"bytes": "188937"
},
{
"name": "Perl",
"bytes": "229498"
},
{
"name": "Python",
"bytes": "7684946"
},
{
"name": "Shell",
"bytes": "1805"
}
],
"symlink_target": ""
} |
import copy
import mock
from neutronclient.common import exceptions as neutron_exception
from ec2api.api import ec2utils
from ec2api.tests.unit import base
from ec2api.tests.unit import fakes
from ec2api.tests.unit import matchers
from ec2api.tests.unit import tools
class NetworkInterfaceTestCase(base.ApiTestCase):
def test_create_network_interface(self):
self.set_mock_db_items(fakes.DB_SUBNET_1, fakes.DB_VPC_1,
fakes.DB_SECURITY_GROUP_1)
self.db_api.add_item.return_value = fakes.DB_NETWORK_INTERFACE_1
self.neutron.show_subnet.return_value = {'subnet': fakes.OS_SUBNET_1}
self.neutron.create_port.return_value = {'port': fakes.OS_PORT_1}
self.neutron.list_security_groups.return_value = (
{'security_groups': [copy.deepcopy(fakes.OS_SECURITY_GROUP_1)]})
def check_response(resp, auto_ips=False):
self.assertThat(fakes.EC2_NETWORK_INTERFACE_1,
matchers.DictMatches(resp['networkInterface']))
self.db_api.add_item.assert_called_once_with(
mock.ANY, 'eni',
tools.purge_dict(fakes.DB_NETWORK_INTERFACE_1, ('id',)))
if auto_ips:
self.neutron.create_port.assert_called_once_with(
{'port':
{'network_id': fakes.ID_OS_NETWORK_1,
'fixed_ips':
[{'subnet_id': fakes.ID_OS_SUBNET_1}],
'security_groups': [fakes.ID_OS_SECURITY_GROUP_1]}})
else:
self.neutron.create_port.assert_called_once_with(
{'port':
{'network_id': fakes.ID_OS_NETWORK_1,
'fixed_ips':
[{'ip_address': fakes.IP_NETWORK_INTERFACE_1}],
'security_groups': [fakes.ID_OS_SECURITY_GROUP_1]}})
self.neutron.update_port.assert_called_once_with(
fakes.ID_OS_PORT_1,
{'port': {'name':
fakes.ID_EC2_NETWORK_INTERFACE_1}})
self.neutron.reset_mock()
self.db_api.reset_mock()
self.neutron.list_security_groups.return_value = (
{'security_groups': [
copy.deepcopy(fakes.OS_SECURITY_GROUP_1)]})
resp = self.execute(
'CreateNetworkInterface',
{'SubnetId': fakes.ID_EC2_SUBNET_1,
'PrivateIpAddress': fakes.IP_NETWORK_INTERFACE_1,
'Description': fakes.DESCRIPTION_NETWORK_INTERFACE_1})
check_response(resp)
resp = self.execute(
'CreateNetworkInterface',
{'SubnetId': fakes.ID_EC2_SUBNET_1,
'PrivateIpAddresses.1.PrivateIpAddress': (
fakes.IP_NETWORK_INTERFACE_1),
'PrivateIpAddresses.1.Primary': True,
'Description': fakes.DESCRIPTION_NETWORK_INTERFACE_1})
check_response(resp)
resp = self.execute(
'CreateNetworkInterface',
{'SubnetId': fakes.ID_EC2_SUBNET_1,
'Description': fakes.DESCRIPTION_NETWORK_INTERFACE_1})
check_response(resp, True)
resp = self.execute(
'CreateNetworkInterface',
{'SubnetId': fakes.ID_EC2_SUBNET_1,
'SecondaryPrivateIpAddressCount': '1',
'Description': fakes.DESCRIPTION_NETWORK_INTERFACE_1})
check_response(resp, True)
resp = self.execute(
'CreateNetworkInterface',
{'SubnetId': fakes.ID_EC2_SUBNET_1,
'SecondaryPrivateIpAddressCount': '0',
'Description': fakes.DESCRIPTION_NETWORK_INTERFACE_1})
check_response(resp, True)
def test_create_network_interface_multiple_ips(self):
self.set_mock_db_items(fakes.DB_SUBNET_2, fakes.DB_VPC_1,
fakes.DB_SECURITY_GROUP_1)
self.db_api.add_item.return_value = fakes.DB_NETWORK_INTERFACE_2
self.neutron.show_subnet.return_value = {'subnet': fakes.OS_SUBNET_2}
self.neutron.create_port.return_value = {'port': fakes.OS_PORT_2}
self.neutron.list_security_groups.return_value = (
{'security_groups': [copy.deepcopy(fakes.OS_SECURITY_GROUP_1)]})
created_ec2_network_interface = tools.patch_dict(
fakes.EC2_NETWORK_INTERFACE_2,
{'privateIpAddressesSet': [
tools.purge_dict(s, ['association'])
for s in fakes.EC2_NETWORK_INTERFACE_2[
'privateIpAddressesSet']]},
['association'])
def check_response(resp):
self.assertThat(created_ec2_network_interface,
matchers.DictMatches(resp['networkInterface']))
self.db_api.add_item.assert_called_once_with(
mock.ANY, 'eni',
tools.purge_dict(fakes.DB_NETWORK_INTERFACE_2,
('id',
'device_index',
'instance_id',
'delete_on_termination',
'attach_time')))
self.neutron.update_port.assert_called_once_with(
fakes.ID_OS_PORT_2,
{'port': {'name':
fakes.ID_EC2_NETWORK_INTERFACE_2}})
self.neutron.reset_mock()
self.db_api.reset_mock()
self.neutron.list_security_groups.return_value = (
{'security_groups': [
copy.deepcopy(fakes.OS_SECURITY_GROUP_1)]})
resp = self.execute(
'CreateNetworkInterface',
{'SubnetId': fakes.ID_EC2_SUBNET_2,
'SecondaryPrivateIpAddressCount': '3',
'Description': fakes.DESCRIPTION_NETWORK_INTERFACE_2})
self.neutron.create_port.assert_called_once_with(
{'port': {'network_id': fakes.ID_OS_NETWORK_2,
'fixed_ips': [{'subnet_id': fakes.ID_OS_SUBNET_2},
{'subnet_id': fakes.ID_OS_SUBNET_2},
{'subnet_id': fakes.ID_OS_SUBNET_2}],
'security_groups': [fakes.ID_OS_SECURITY_GROUP_1]}})
check_response(resp)
resp = self.execute(
'CreateNetworkInterface',
{'SubnetId': fakes.ID_EC2_SUBNET_2,
'PrivateIpAddress': fakes.IPS_NETWORK_INTERFACE_2[0],
'PrivateIpAddresses.1.PrivateIpAddress':
fakes.IPS_NETWORK_INTERFACE_2[1],
'PrivateIpAddresses.1.Primary': False,
'PrivateIpAddresses.2.PrivateIpAddress':
fakes.IPS_NETWORK_INTERFACE_2[2],
'PrivateIpAddresses.2.Primary': False,
'Description': fakes.DESCRIPTION_NETWORK_INTERFACE_2})
self.neutron.create_port.assert_called_once_with(
{'port':
{'network_id': fakes.ID_OS_NETWORK_2,
'fixed_ips': [
{'ip_address': fakes.IPS_NETWORK_INTERFACE_2[0]},
{'ip_address': fakes.IPS_NETWORK_INTERFACE_2[1]},
{'ip_address': fakes.IPS_NETWORK_INTERFACE_2[2]}],
'security_groups': [fakes.ID_OS_SECURITY_GROUP_1]}})
check_response(resp)
resp = self.execute(
'CreateNetworkInterface',
{'SubnetId': fakes.ID_EC2_SUBNET_2,
'PrivateIpAddresses.1.PrivateIpAddress':
fakes.IPS_NETWORK_INTERFACE_2[0],
'PrivateIpAddresses.1.Primary': True,
'PrivateIpAddresses.2.PrivateIpAddress':
fakes.IPS_NETWORK_INTERFACE_2[1],
'PrivateIpAddresses.2.Primary': False,
'PrivateIpAddresses.3.PrivateIpAddress':
fakes.IPS_NETWORK_INTERFACE_2[2],
'PrivateIpAddresses.3.Primary': False,
'Description': fakes.DESCRIPTION_NETWORK_INTERFACE_2})
self.neutron.create_port.assert_called_once_with(
{'port':
{'network_id': fakes.ID_OS_NETWORK_2,
'fixed_ips': [
{'ip_address': fakes.IPS_NETWORK_INTERFACE_2[0]},
{'ip_address': fakes.IPS_NETWORK_INTERFACE_2[1]},
{'ip_address': fakes.IPS_NETWORK_INTERFACE_2[2]}],
'security_groups': [fakes.ID_OS_SECURITY_GROUP_1]}})
check_response(resp)
resp = self.execute(
'CreateNetworkInterface',
{'SubnetId': fakes.ID_EC2_SUBNET_2,
'PrivateIpAddress': fakes.IPS_NETWORK_INTERFACE_2[0],
'PrivateIpAddresses.1.PrivateIpAddress':
fakes.IPS_NETWORK_INTERFACE_2[1],
'PrivateIpAddresses.1.Primary': False,
'SecondaryPrivateIpAddressCount': '1',
'Description': fakes.DESCRIPTION_NETWORK_INTERFACE_2})
self.neutron.create_port.assert_called_once_with(
{'port':
{'network_id': fakes.ID_OS_NETWORK_2,
'fixed_ips': [
{'ip_address': fakes.IPS_NETWORK_INTERFACE_2[0]},
{'ip_address': fakes.IPS_NETWORK_INTERFACE_2[1]},
{'subnet_id': fakes.ID_OS_SUBNET_2}],
'security_groups': [fakes.ID_OS_SECURITY_GROUP_1]}})
check_response(resp)
def test_create_network_interface_invalid_parameters(self):
def do_check(args, error_code):
self.neutron.reset_mock()
self.db_api.reset_mock()
self.assert_execution_error(
error_code, 'CreateNetworkInterface', args)
self.set_mock_db_items()
do_check({'SubnetId': fakes.ID_EC2_SUBNET_2},
'InvalidSubnetID.NotFound')
self.db_api.get_item_by_id.assert_called_once_with(
mock.ANY, fakes.ID_EC2_SUBNET_2)
self.set_mock_db_items(fakes.DB_SUBNET_1, fakes.DB_VPC_1)
self.neutron.show_subnet.return_value = {'subnet': fakes.OS_SUBNET_1}
do_check({'SubnetId': fakes.ID_EC2_SUBNET_1,
'PrivateIpAddress': fakes.IP_NETWORK_INTERFACE_2},
'InvalidParameterValue')
for cls in [neutron_exception.OverQuotaClient,
neutron_exception.IpAddressGenerationFailureClient]:
self.neutron.create_port.side_effect = cls()
do_check({'SubnetId': fakes.ID_EC2_SUBNET_1,
'PrivateIpAddress': fakes.IP_NETWORK_INTERFACE_1},
'InsufficientFreeAddressesInSubnet')
for cls in [neutron_exception.IpAddressInUseClient,
neutron_exception.BadRequest]:
self.neutron.create_port.side_effect = cls()
do_check({'SubnetId': fakes.ID_EC2_SUBNET_1,
'PrivateIpAddress': fakes.IP_NETWORK_INTERFACE_1},
'InvalidParameterValue')
@tools.screen_unexpected_exception_logs
@mock.patch('ec2api.api.dhcp_options._add_dhcp_opts_to_port')
def test_create_network_interface_rollback(self, _add_dhcp_opts_to_port):
self.set_mock_db_items(
tools.update_dict(
fakes.DB_VPC_1,
{'dhcp_options_id': fakes.ID_EC2_DHCP_OPTIONS_1}),
fakes.DB_SUBNET_1, fakes.DB_DHCP_OPTIONS_1)
self.db_api.add_item.return_value = fakes.DB_NETWORK_INTERFACE_1
self.neutron.show_subnet.return_value = {'subnet': fakes.OS_SUBNET_1}
self.neutron.create_port.return_value = {'port': fakes.OS_PORT_1}
_add_dhcp_opts_to_port.side_effect = Exception()
self.assert_execution_error(self.ANY_EXECUTE_ERROR,
'CreateNetworkInterface',
{'SubnetId': fakes.ID_EC2_SUBNET_1})
self.neutron.delete_port.assert_called_once_with(fakes.ID_OS_PORT_1)
self.db_api.delete_item.assert_called_once_with(
mock.ANY, fakes.ID_EC2_NETWORK_INTERFACE_1)
def test_delete_network_interface(self):
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_1)
resp = self.execute(
'DeleteNetworkInterface',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1})
self.assertEqual(True, resp['return'])
self.db_api.get_item_by_id.assert_any_call(
mock.ANY,
fakes.ID_EC2_NETWORK_INTERFACE_1)
self.db_api.delete_item.assert_called_once_with(
mock.ANY,
fakes.ID_EC2_NETWORK_INTERFACE_1)
self.neutron.delete_port.assert_called_once_with(
fakes.ID_OS_PORT_1)
def test_delete_network_interface_obsolete(self):
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_1)
self.neutron.delete_port.side_effect = (
neutron_exception.PortNotFoundClient())
resp = self.execute(
'DeleteNetworkInterface',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1})
self.assertEqual(True, resp['return'])
def test_delete_network_interface_no_network_interface(self):
self.set_mock_db_items()
self.assert_execution_error(
'InvalidNetworkInterfaceID.NotFound', 'DeleteNetworkInterface',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1})
self.assertEqual(0, self.neutron.delete_port.call_count)
def test_delete_network_interface_is_in_use(self):
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_2)
self.assert_execution_error(
'InvalidParameterValue', 'DeleteNetworkInterface',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_2})
self.assertEqual(0, self.neutron.delete_port.call_count)
def test_delete_network_interface_with_public_ip(self):
detached_network_interface_2 = fakes.gen_db_network_interface(
fakes.ID_EC2_NETWORK_INTERFACE_2,
fakes.ID_OS_PORT_2,
fakes.ID_EC2_VPC_1,
fakes.ID_EC2_SUBNET_2,
fakes.IP_NETWORK_INTERFACE_2)
self.set_mock_db_items(detached_network_interface_2,
fakes.DB_ADDRESS_1, fakes.DB_ADDRESS_2)
resp = self.execute(
'DeleteNetworkInterface',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_2})
self.assertEqual(True, resp['return'])
self.db_api.get_item_by_id.assert_any_call(
mock.ANY,
fakes.ID_EC2_NETWORK_INTERFACE_2)
self.db_api.delete_item.assert_called_once_with(
mock.ANY,
fakes.ID_EC2_NETWORK_INTERFACE_2)
self.neutron.delete_port.assert_called_once_with(
fakes.ID_OS_PORT_2)
self.db_api.update_item.assert_called_once_with(
mock.ANY,
tools.purge_dict(fakes.DB_ADDRESS_2,
['network_interface_id',
'private_ip_address']))
@tools.screen_unexpected_exception_logs
def test_delete_network_interface_rollback(self):
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_1)
self.neutron.delete_port.side_effect = Exception()
self.assert_execution_error(
self.ANY_EXECUTE_ERROR, 'DeleteNetworkInterface',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1})
self.db_api.restore_item.assert_called_once_with(
mock.ANY, 'eni', fakes.DB_NETWORK_INTERFACE_1)
def test_describe_network_interfaces(self):
self.set_mock_db_items(
fakes.DB_NETWORK_INTERFACE_1, fakes.DB_NETWORK_INTERFACE_2,
fakes.DB_ADDRESS_1, fakes.DB_ADDRESS_2,
fakes.DB_INSTANCE_1, fakes.DB_INSTANCE_2,
fakes.DB_SECURITY_GROUP_1)
self.neutron.list_ports.return_value = (
{'ports': [fakes.OS_PORT_1, fakes.OS_PORT_2]})
self.neutron.list_floatingips.return_value = (
{'floatingips': [fakes.OS_FLOATING_IP_1,
fakes.OS_FLOATING_IP_2]})
self.neutron.list_security_groups.return_value = (
{'security_groups': [copy.deepcopy(fakes.OS_SECURITY_GROUP_1)]})
resp = self.execute('DescribeNetworkInterfaces', {})
self.assertThat(resp['networkInterfaceSet'],
matchers.ListMatches(
[fakes.EC2_NETWORK_INTERFACE_1,
fakes.EC2_NETWORK_INTERFACE_2],
orderless_lists=True),
verbose=True)
self.db_api.get_items_by_ids = tools.CopyingMock(
return_value=[fakes.DB_NETWORK_INTERFACE_1])
resp = self.execute(
'DescribeNetworkInterfaces',
{'NetworkInterfaceId.1': fakes.ID_EC2_NETWORK_INTERFACE_1})
self.assertThat(resp['networkInterfaceSet'],
matchers.ListMatches(
[fakes.EC2_NETWORK_INTERFACE_1]))
self.db_api.get_items_by_ids.assert_called_once_with(
mock.ANY, set([fakes.ID_EC2_NETWORK_INTERFACE_1]))
self.check_filtering(
'DescribeNetworkInterfaces', 'networkInterfaceSet',
[('addresses.private-ip-address',
fakes.IP_NETWORK_INTERFACE_2_EXT_1,),
('addresses.primary', False),
('addresses.association.public-ip', fakes.IP_ADDRESS_2),
('addresses.association.owner-id', fakes.ID_OS_PROJECT),
('association.association-id', fakes.ID_EC2_ASSOCIATION_2),
('association.allocation-id', fakes.ID_EC2_ADDRESS_2),
('association.ip-owner-id', fakes.ID_OS_PROJECT),
('association.public-ip', fakes.IP_ADDRESS_2),
('attachment.attachment-id',
fakes.ID_EC2_NETWORK_INTERFACE_2_ATTACH),
('attachment.instance-id', fakes.ID_EC2_INSTANCE_1),
('attachment.instance-owner-id', fakes.ID_OS_PROJECT),
('attachment.device-index', 0),
('attachment.status', 'attached'),
('attachment.attach.time', fakes.TIME_ATTACH_NETWORK_INTERFACE),
('attachment.delete-on-termination', False),
('description', fakes.DESCRIPTION_NETWORK_INTERFACE_1),
('group-id', fakes.ID_EC2_SECURITY_GROUP_1),
('group-name', fakes.NAME_DEFAULT_OS_SECURITY_GROUP),
('mac-address', fakes.MAC_ADDRESS),
('network-interface-id', fakes.ID_EC2_NETWORK_INTERFACE_1),
('owner-id', fakes.ID_OS_PROJECT),
('private-ip-address', fakes.IP_NETWORK_INTERFACE_1),
('requester-managed', False),
('source-dest-check', True),
('status', 'available'),
('vpc-id', fakes.ID_EC2_VPC_1),
('subnet-id', fakes.ID_EC2_SUBNET_2)])
self.check_tag_support(
'DescribeNetworkInterfaces', 'networkInterfaceSet',
fakes.ID_EC2_NETWORK_INTERFACE_1, 'networkInterfaceId')
def test_describe_network_interface_attribute(self):
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_1)
resp = self.execute(
'DescribeNetworkInterfaceAttribute',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1,
'Attribute': 'description'})
self.assertEqual(fakes.ID_EC2_NETWORK_INTERFACE_1,
resp['networkInterfaceId'])
self.assertEqual(fakes.DESCRIPTION_NETWORK_INTERFACE_1,
resp['description'].get('value', None))
def test_modify_network_interface_attribute(self):
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_1,
fakes.DB_NETWORK_INTERFACE_2)
self.execute(
'ModifyNetworkInterfaceAttribute',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1,
'Description.Value': 'New description'})
self.db_api.update_item.assert_called_once_with(
mock.ANY,
tools.update_dict(fakes.DB_NETWORK_INTERFACE_1,
{'description': 'New description'}))
self.db_api.reset_mock()
self.execute(
'ModifyNetworkInterfaceAttribute',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_2,
'Attachment.AttachmentId': (
fakes.ID_EC2_NETWORK_INTERFACE_2_ATTACH),
'Attachment.DeleteOnTermination': 'True'})
self.db_api.update_item.assert_called_once_with(
mock.ANY,
tools.update_dict(fakes.DB_NETWORK_INTERFACE_2,
{'delete_on_termination': True}))
def test_modify_network_interface_attribute_invalid_parameters(self):
self.assert_execution_error(
'InvalidParameterCombination', 'ModifyNetworkInterfaceAttribute',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1,
'Description.Value': 'New description',
'SourceDestCheck.Value': 'True'})
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_1,
fakes.DB_NETWORK_INTERFACE_2)
self.assert_execution_error(
'MissingParameter', 'ModifyNetworkInterfaceAttribute',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_2,
'Attachment.DeleteOnTermination': 'True'})
self.assert_execution_error(
'MissingParameter', 'ModifyNetworkInterfaceAttribute',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_2,
'Attachment.AttachmentId': (
fakes.ID_EC2_NETWORK_INTERFACE_2_ATTACH)})
self.assert_execution_error(
'InvalidAttachmentID.NotFound', 'ModifyNetworkInterfaceAttribute',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1,
'Attachment.AttachmentId': (
fakes.ID_EC2_NETWORK_INTERFACE_2_ATTACH),
'Attachment.DeleteOnTermination': 'True'})
self.assert_execution_error(
'InvalidAttachmentID.NotFound', 'ModifyNetworkInterfaceAttribute',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_2,
'Attachment.AttachmentId': ec2utils.change_ec2_id_kind(
fakes.ID_EC2_NETWORK_INTERFACE_1, 'eni-attach'),
'Attachment.DeleteOnTermination': 'True'})
def test_reset_network_interface_attribute(self):
self.execute(
'ResetNetworkInterfaceAttribute',
{'NetworkInterfaceId':
fakes.ID_EC2_NETWORK_INTERFACE_1,
'Attribute': 'sourceDestCheck'})
def test_attach_network_interface(self):
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_1,
fakes.DB_INSTANCE_1)
self.neutron.show_port.return_value = (
{'port': fakes.OS_PORT_1})
self.isotime.return_value = fakes.TIME_ATTACH_NETWORK_INTERFACE
self.execute(
'AttachNetworkInterface',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1,
'InstanceId': fakes.ID_EC2_INSTANCE_1,
'DeviceIndex': '1'})
self.nova.servers.interface_attach.assert_called_once_with(
fakes.ID_OS_INSTANCE_1, fakes.ID_OS_PORT_1, None, None)
self.db_api.update_item.assert_called_once_with(
mock.ANY,
tools.update_dict(
fakes.DB_NETWORK_INTERFACE_1,
{'device_index': 1,
'instance_id': fakes.ID_EC2_INSTANCE_1,
'delete_on_termination': False,
'attach_time': fakes.TIME_ATTACH_NETWORK_INTERFACE}))
def test_attach_network_interface_invalid_parameters(self):
# NOTE(ft): eni is already attached
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_2)
self.assert_execution_error(
'InvalidParameterValue', 'AttachNetworkInterface',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_2,
'InstanceId': fakes.ID_EC2_INSTANCE_2,
'DeviceIndex': '1'})
# NOTE(ft): device index is in use
self.set_mock_db_items(
fakes.DB_NETWORK_INTERFACE_1, fakes.DB_NETWORK_INTERFACE_2,
fakes.DB_INSTANCE_1)
self.assert_execution_error(
'InvalidParameterValue', 'AttachNetworkInterface',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1,
'InstanceId': fakes.ID_EC2_INSTANCE_1,
'DeviceIndex': '0'})
@tools.screen_unexpected_exception_logs
def test_attach_network_interface_rollback(self):
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_1,
fakes.DB_INSTANCE_1)
self.neutron.show_port.return_value = (
{'port': fakes.OS_PORT_2})
self.isotime.return_value = fakes.TIME_ATTACH_NETWORK_INTERFACE
self.nova.servers.interface_attach.side_effect = Exception()
self.assert_execution_error(
self.ANY_EXECUTE_ERROR, 'AttachNetworkInterface',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1,
'InstanceId': fakes.ID_EC2_INSTANCE_1,
'DeviceIndex': '1'})
self.db_api.update_item.assert_any_call(
mock.ANY, fakes.DB_NETWORK_INTERFACE_1)
def test_detach_network_interface(self):
network_interface = tools.update_dict(fakes.DB_NETWORK_INTERFACE_2,
{'device_index': 1})
self.set_mock_db_items(network_interface)
self.neutron.show_port.return_value = (
{'port': fakes.OS_PORT_2})
self.execute(
'DetachNetworkInterface',
{'AttachmentId': ec2utils.change_ec2_id_kind(
fakes.ID_EC2_NETWORK_INTERFACE_2, 'eni-attach')})
self.neutron.update_port.assert_called_once_with(
fakes.ID_OS_PORT_2,
{'port': {'device_id': '',
'device_owner': ''}}
)
self.db_api.update_item.assert_called_once_with(
mock.ANY,
tools.purge_dict(fakes.DB_NETWORK_INTERFACE_2,
{'device_index',
'instance_id',
'delete_on_termination',
'attach_time'}))
def test_detach_network_interface_invalid_parameters(self):
# NOTE(ft): eni is not found
self.set_mock_db_items()
self.assert_execution_error(
'InvalidAttachmentID.NotFound', 'DetachNetworkInterface',
{'AttachmentId': ec2utils.change_ec2_id_kind(
fakes.ID_EC2_NETWORK_INTERFACE_2, 'eni-attach')})
# NOTE(ft): eni is attached with device index = 0
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_2)
self.assert_execution_error(
'OperationNotPermitted', 'DetachNetworkInterface',
{'AttachmentId': ec2utils.change_ec2_id_kind(
fakes.ID_EC2_NETWORK_INTERFACE_2, 'eni-attach')})
@tools.screen_unexpected_exception_logs
def test_detach_network_interface_rollback(self):
network_interface = tools.update_dict(fakes.DB_NETWORK_INTERFACE_2,
{'device_index': 1})
self.set_mock_db_items(network_interface)
self.neutron.show_port.return_value = (
{'port': fakes.OS_PORT_2})
self.neutron.update_port.side_effect = Exception()
self.assert_execution_error(
self.ANY_EXECUTE_ERROR, 'DetachNetworkInterface',
{'AttachmentId': fakes.ID_EC2_NETWORK_INTERFACE_2_ATTACH})
self.db_api.update_item.assert_any_call(
mock.ANY, network_interface)
def test_assign_unassign_private_ip_addresses(self):
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_1, fakes.DB_SUBNET_1)
self.neutron.show_subnet.return_value = (
{'subnet': fakes.OS_SUBNET_1})
self.neutron.show_port.return_value = (
{'port': copy.deepcopy(fakes.OS_PORT_1)})
self.execute(
'AssignPrivateIpAddresses',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1,
'PrivateIpAddress.1': '10.10.1.5',
'PrivateIpAddress.2': '10.10.1.6',
})
self.neutron.update_port.assert_called_once_with(
fakes.ID_OS_PORT_1,
{'port':
{'fixed_ips': [
{'subnet_id': fakes.ID_OS_SUBNET_1,
'ip_address': fakes.IP_NETWORK_INTERFACE_1},
{'ip_address': '10.10.1.5'},
{'ip_address': '10.10.1.6'}]}})
self.execute(
'UnassignPrivateIpAddresses',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1,
'PrivateIpAddress.1': '10.10.1.5',
'PrivateIpAddress.2': '10.10.1.6',
})
self.neutron.update_port.assert_any_call(
fakes.ID_OS_PORT_1,
{'port':
{'fixed_ips': [
{'subnet_id': fakes.ID_OS_SUBNET_1,
'ip_address': fakes.IP_NETWORK_INTERFACE_1}]}})
def test_assign_private_ip_addresses_invalid_parameters(self):
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_1, fakes.DB_SUBNET_1)
self.neutron.show_subnet.return_value = (
{'subnet': fakes.OS_SUBNET_1})
self.neutron.show_port.return_value = (
{'port': copy.deepcopy(fakes.OS_PORT_1)})
def do_check(error_code):
self.assert_execution_error(
error_code, 'AssignPrivateIpAddresses',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_1,
'PrivateIpAddress.1': '10.10.1.5',
'PrivateIpAddress.2': '10.10.1.6',
})
self.neutron.update_port.side_effect = (
neutron_exception.IpAddressGenerationFailureClient())
do_check('InsufficientFreeAddressesInSubnet')
self.neutron.update_port.side_effect = (
neutron_exception.IpAddressInUseClient())
do_check('InvalidParameterValue')
self.neutron.update_port.side_effect = (
neutron_exception.BadRequest())
do_check('InvalidParameterValue')
def test_unassign_private_ip_addresses_invalid_parameters(self):
self.set_mock_db_items(fakes.DB_NETWORK_INTERFACE_2, fakes.DB_SUBNET_2)
self.neutron.show_subnet.return_value = (
{'subnet': fakes.OS_SUBNET_2})
self.neutron.show_port.return_value = (
{'port': copy.deepcopy(fakes.OS_PORT_2)})
self.assert_execution_error(
'InvalidParameterValue', 'UnassignPrivateIpAddresses',
{'NetworkInterfaceId': fakes.ID_EC2_NETWORK_INTERFACE_2,
'PrivateIpAddress.1': '10.10.2.55'})
| {
"content_hash": "fc1a678a4b881361b1fd447d541ec710",
"timestamp": "",
"source": "github",
"line_count": 664,
"max_line_length": 79,
"avg_line_length": 46.46234939759036,
"alnum_prop": 0.5778742990502739,
"repo_name": "hayderimran7/ec2-api",
"id": "ef5dd7c96d2e83795fb72c223572213c83822e85",
"size": "31441",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "ec2api/tests/unit/test_network_interface.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1701144"
},
{
"name": "Shell",
"bytes": "29607"
}
],
"symlink_target": ""
} |
"""The tests for the image_processing component."""
from unittest.mock import patch, PropertyMock
from homeassistant.core import callback
from homeassistant.const import ATTR_ENTITY_PICTURE
from homeassistant.bootstrap import setup_component
from homeassistant.exceptions import HomeAssistantError
import homeassistant.components.http as http
import homeassistant.components.image_processing as ip
from tests.common import (
get_test_home_assistant, get_test_instance_port, assert_setup_component)
class TestSetupImageProcessing(object):
"""Test class for setup image processing."""
def setup_method(self):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_component(self):
"""Setup demo platfrom on image_process component."""
config = {
ip.DOMAIN: {
'platform': 'demo'
}
}
with assert_setup_component(1, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
def test_setup_component_with_service(self):
"""Setup demo platfrom on image_process component test service."""
config = {
ip.DOMAIN: {
'platform': 'demo'
}
}
with assert_setup_component(1, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
assert self.hass.services.has_service(ip.DOMAIN, 'scan')
class TestImageProcessing(object):
"""Test class for image processing."""
def setup_method(self):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
setup_component(
self.hass, http.DOMAIN,
{http.DOMAIN: {http.CONF_SERVER_PORT: get_test_instance_port()}})
config = {
ip.DOMAIN: {
'platform': 'test'
},
'camera': {
'platform': 'demo'
},
}
setup_component(self.hass, ip.DOMAIN, config)
state = self.hass.states.get('camera.demo_camera')
self.url = "{0}{1}".format(
self.hass.config.api.base_url,
state.attributes.get(ATTR_ENTITY_PICTURE))
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
@patch('homeassistant.components.camera.demo.DemoCamera.camera_image',
autospec=True, return_value=b'Test')
def test_get_image_from_camera(self, mock_camera):
"""Grab a image from camera entity."""
self.hass.start()
ip.scan(self.hass, entity_id='image_processing.test')
self.hass.block_till_done()
state = self.hass.states.get('image_processing.test')
assert mock_camera.called
assert state.state == '1'
assert state.attributes['image'] == b'Test'
@patch('homeassistant.components.camera.async_get_image',
side_effect=HomeAssistantError())
def test_get_image_without_exists_camera(self, mock_image):
"""Try to get image without exists camera."""
self.hass.states.remove('camera.demo_camera')
ip.scan(self.hass, entity_id='image_processing.test')
self.hass.block_till_done()
state = self.hass.states.get('image_processing.test')
assert mock_image.called
assert state.state == '0'
class TestImageProcessingAlpr(object):
"""Test class for image processing."""
def setup_method(self):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
config = {
ip.DOMAIN: {
'platform': 'demo'
},
'camera': {
'platform': 'demo'
},
}
with patch('homeassistant.components.image_processing.demo.'
'DemoImageProcessingAlpr.should_poll',
new_callable=PropertyMock(return_value=False)):
setup_component(self.hass, ip.DOMAIN, config)
state = self.hass.states.get('camera.demo_camera')
self.url = "{0}{1}".format(
self.hass.config.api.base_url,
state.attributes.get(ATTR_ENTITY_PICTURE))
self.alpr_events = []
@callback
def mock_alpr_event(event):
"""Mock event."""
self.alpr_events.append(event)
self.hass.bus.listen('found_plate', mock_alpr_event)
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
def test_alpr_event_single_call(self, aioclient_mock):
"""Setup and scan a picture and test plates from event."""
aioclient_mock.get(self.url, content=b'image')
ip.scan(self.hass, entity_id='image_processing.demo_alpr')
self.hass.block_till_done()
state = self.hass.states.get('image_processing.demo_alpr')
assert len(self.alpr_events) == 4
assert state.state == 'AC3829'
event_data = [event.data for event in self.alpr_events if
event.data.get('plate') == 'AC3829']
assert len(event_data) == 1
assert event_data[0]['plate'] == 'AC3829'
assert event_data[0]['confidence'] == 98.3
assert event_data[0]['entity_id'] == 'image_processing.demo_alpr'
def test_alpr_event_double_call(self, aioclient_mock):
"""Setup and scan a picture and test plates from event."""
aioclient_mock.get(self.url, content=b'image')
ip.scan(self.hass, entity_id='image_processing.demo_alpr')
ip.scan(self.hass, entity_id='image_processing.demo_alpr')
self.hass.block_till_done()
state = self.hass.states.get('image_processing.demo_alpr')
assert len(self.alpr_events) == 4
assert state.state == 'AC3829'
event_data = [event.data for event in self.alpr_events if
event.data.get('plate') == 'AC3829']
assert len(event_data) == 1
assert event_data[0]['plate'] == 'AC3829'
assert event_data[0]['confidence'] == 98.3
assert event_data[0]['entity_id'] == 'image_processing.demo_alpr'
@patch('homeassistant.components.image_processing.demo.'
'DemoImageProcessingAlpr.confidence',
new_callable=PropertyMock(return_value=95))
def test_alpr_event_single_call_confidence(self, confidence_mock,
aioclient_mock):
"""Setup and scan a picture and test plates from event."""
aioclient_mock.get(self.url, content=b'image')
ip.scan(self.hass, entity_id='image_processing.demo_alpr')
self.hass.block_till_done()
state = self.hass.states.get('image_processing.demo_alpr')
assert len(self.alpr_events) == 2
assert state.state == 'AC3829'
event_data = [event.data for event in self.alpr_events if
event.data.get('plate') == 'AC3829']
assert len(event_data) == 1
assert event_data[0]['plate'] == 'AC3829'
assert event_data[0]['confidence'] == 98.3
assert event_data[0]['entity_id'] == 'image_processing.demo_alpr'
class TestImageProcessingFaceIdentify(object):
"""Test class for image processing."""
def setup_method(self):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
config = {
ip.DOMAIN: {
'platform': 'demo'
},
'camera': {
'platform': 'demo'
},
}
with patch('homeassistant.components.image_processing.demo.'
'DemoImageProcessingFaceIdentify.should_poll',
new_callable=PropertyMock(return_value=False)):
setup_component(self.hass, ip.DOMAIN, config)
state = self.hass.states.get('camera.demo_camera')
self.url = "{0}{1}".format(
self.hass.config.api.base_url,
state.attributes.get(ATTR_ENTITY_PICTURE))
self.face_events = []
@callback
def mock_face_event(event):
"""Mock event."""
self.face_events.append(event)
self.hass.bus.listen('identify_face', mock_face_event)
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
def test_face_event_call(self, aioclient_mock):
"""Setup and scan a picture and test faces from event."""
aioclient_mock.get(self.url, content=b'image')
ip.scan(self.hass, entity_id='image_processing.demo_face_identify')
self.hass.block_till_done()
state = self.hass.states.get('image_processing.demo_face_identify')
assert len(self.face_events) == 2
assert state.state == 'Hans'
assert state.attributes['total_faces'] == 4
event_data = [event.data for event in self.face_events if
event.data.get('name') == 'Hans']
assert len(event_data) == 1
assert event_data[0]['name'] == 'Hans'
assert event_data[0]['confidence'] == 98.34
assert event_data[0]['entity_id'] == \
'image_processing.demo_face_identify'
| {
"content_hash": "ef76f1871389f4aaf303fd469c70e130",
"timestamp": "",
"source": "github",
"line_count": 272,
"max_line_length": 77,
"avg_line_length": 34.283088235294116,
"alnum_prop": 0.5947453083109919,
"repo_name": "eagleamon/home-assistant",
"id": "b13dcf48a72e466ab9cd9aa85b4f85f8f52e6d1a",
"size": "9325",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "tests/components/image_processing/test_init.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1510047"
},
{
"name": "Python",
"bytes": "5066084"
},
{
"name": "Ruby",
"bytes": "379"
},
{
"name": "Shell",
"bytes": "14079"
}
],
"symlink_target": ""
} |
import tensorflow as tf
from tensorflow.python.framework import ops
import projecting_op
'''
@tf.RegisterShape("Project")
def _project_shape(op):
"""Shape function for the Backproject op.
"""
dims_data = op.inputs[0].get_shape().as_list()
batch_size = dims_data[0]
channels = dims_data[4]
dims_image = op.inputs[1].get_shape().as_list()
height = dims_image[1]
width = dims_image[2]
output_shape = tf.TensorShape([batch_size, height, width, channels])
return [output_shape]
'''
@ops.RegisterGradient("Project")
def _project_grad(op, grad):
"""The gradients for `project`.
Args:
op: The `project` `Operation` that we are differentiating, which we can use
to find the inputs and outputs of the original op.
    grad: Gradient with respect to the output of the `project` op.
Returns:
Gradients with respect to the input of `project`.
"""
data = op.inputs[0]
depth = op.inputs[1]
meta_data = op.inputs[2]
kernel_size = op.get_attr('kernel_size')
threshold = op.get_attr('threshold')
# compute gradient
data_grad = projecting_op.project_grad(data, depth, meta_data, grad, kernel_size, threshold)
  return [data_grad, None, None]  # one value per input; only `data` receives a gradient
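# A hedged usage sketch (the exact Python signature of projecting_op.project
# is an assumption inferred from the inputs and attrs read above): once the
# gradient is registered, TensorFlow's autodiff dispatches to _project_grad
# automatically whenever the op appears in a graph.
#
#   output = projecting_op.project(data, depth, meta_data, kernel_size, threshold)
#   loss = tf.reduce_sum(output)
#   data_grad, = tf.gradients(loss, [data])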
| {
"content_hash": "b94aa12f0b181105edafee2e8655bb10",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 94,
"avg_line_length": 30.195121951219512,
"alnum_prop": 0.6922455573505655,
"repo_name": "yuxng/DA-RNN",
"id": "0bb5dbb8cb55d2b726876248edba31387ba8d0ea",
"size": "1238",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/projecting_layer/projecting_op_grad.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "1173"
},
{
"name": "C++",
"bytes": "587647"
},
{
"name": "CMake",
"bytes": "6045"
},
{
"name": "Cuda",
"bytes": "179693"
},
{
"name": "GLSL",
"bytes": "2882"
},
{
"name": "Makefile",
"bytes": "54"
},
{
"name": "Python",
"bytes": "271435"
},
{
"name": "Shell",
"bytes": "30015"
}
],
"symlink_target": ""
} |
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.coverage', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'python-javabridge'
copyright = u'2013, Broad Institute of MIT and Harvard'
def get_version():
version_file = os.path.join(os.path.dirname(__file__), '..', 'javabridge',
'_version.py')
if os.path.exists(version_file):
with open(version_file) as f:
cached_version_line = f.read().strip()
try:
import re
# From http://stackoverflow.com/a/3619714/17498
cached_version = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
cached_version_line, re.M).group(1)
except:
raise RuntimeError("Unable to find version in %s" % version_file)
split_version = cached_version.split('.')
return '.'.join(split_version[:2]), cached_version
else:
return '0.0', '0.0.0'
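# For example, with __version__ = '1.0.19' (a hypothetical value) in
# javabridge/_version.py, get_version() returns ('1.0', '1.0.19'); when the
# file is absent it falls back to ('0.0', '0.0.0').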
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# version: The short X.Y version.
# release: The full version, including alpha/beta/rc tags.
version, release = get_version()
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'python-javabridgedoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'python-javabridge.tex', u'python-javabridge Documentation',
u'Lee Kamentsky, Vebjorn Ljosa', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'python-javabridge', u'python-javabridge Documentation',
[u'Lee Kamentsky, Vebjorn Ljosa'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'python-javabridge', u'python-javabridge Documentation',
u'Lee Kamentsky, Vebjorn Ljosa', 'python-javabridge', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| {
"content_hash": "3f6ff9ba5ca28385e054c541868b70ea",
"timestamp": "",
"source": "github",
"line_count": 256,
"max_line_length": 129,
"avg_line_length": 33.98828125,
"alnum_prop": 0.689690840133318,
"repo_name": "jakirkham/python-javabridge",
"id": "6fd80fe29a562ff9e3f97a9f93c4c79eab4414b5",
"size": "9129",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "docs/conf.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "27049"
},
{
"name": "Java",
"bytes": "15113"
},
{
"name": "Python",
"bytes": "1846876"
},
{
"name": "Shell",
"bytes": "1213"
}
],
"symlink_target": ""
} |
"""
SAX driver for the Java SAX parsers. Can only be used in Jython.
$Id: drv_javasax.py,v 1.5 2003/01/26 09:08:51 loewis Exp $
"""
# --- Initialization
version = "0.10"
revision = "$Revision: 1.5 $"
import string
from xml.sax import xmlreader, saxutils
from xml.sax.handler import feature_namespaces, feature_namespace_prefixes
from xml.sax import _exceptions
# we only work in jython
import sys
if sys.platform[:4] != "java":
raise _exceptions.SAXReaderNotAvailable("drv_javasax not available in CPython", None)
del sys
# get the necessary Java SAX classes
try:
from org.python.core import FilelikeInputStream
from org.xml.sax.helpers import XMLReaderFactory
from org.xml import sax as javasax
from org.xml.sax.ext import LexicalHandler
except ImportError:
raise _exceptions.SAXReaderNotAvailable("SAX is not on the classpath", None)
# get some JAXP stuff
try:
from javax.xml.parsers import SAXParserFactory, ParserConfigurationException
factory = SAXParserFactory.newInstance()
jaxp = 1
except ImportError:
jaxp = 0
from java.lang import String
def _wrap_sax_exception(e):
return _exceptions.SAXParseException(e.message,
e.exception,
SimpleLocator(e.columnNumber,
e.lineNumber,
e.publicId,
e.systemId))
class JyErrorHandlerWrapper(javasax.ErrorHandler):
def __init__(self, err_handler):
self._err_handler = err_handler
def error(self, exc):
self._err_handler.error(_wrap_sax_exception(exc))
def fatalError(self, exc):
self._err_handler.fatalError(_wrap_sax_exception(exc))
def warning(self, exc):
self._err_handler.warning(_wrap_sax_exception(exc))
class JyInputSourceWrapper(javasax.InputSource):
def __init__(self, source):
if isinstance(source, basestring):
javasax.InputSource.__init__(self, source)
elif hasattr(source, "read"):#file like object
f = source
javasax.InputSource.__init__(self, FilelikeInputStream(f))
if hasattr(f, "name"):
self.setSystemId(f.name)
else:#xml.sax.xmlreader.InputSource object
#Use byte stream constructor if possible so that Xerces won't attempt to open
#the url at systemId unless it's really there
if source.getByteStream():
javasax.InputSource.__init__(self,
FilelikeInputStream(source.getByteStream()))
else:
javasax.InputSource.__init__(self)
if source.getSystemId():
self.setSystemId(source.getSystemId())
self.setPublicId(source.getPublicId())
self.setEncoding(source.getEncoding())
class JyEntityResolverWrapper(javasax.EntityResolver):
def __init__(self, entityResolver):
self._resolver = entityResolver
def resolveEntity(self, pubId, sysId):
return JyInputSourceWrapper(self._resolver.resolveEntity(pubId, sysId))
class JyDTDHandlerWrapper(javasax.DTDHandler):
def __init__(self, dtdHandler):
self._handler = dtdHandler
def notationDecl(self, name, publicId, systemId):
self._handler.notationDecl(name, publicId, systemId)
def unparsedEntityDecl(self, name, publicId, systemId, notationName):
self._handler.unparsedEntityDecl(name, publicId, systemId, notationName)
class SimpleLocator(xmlreader.Locator):
def __init__(self, colNum, lineNum, pubId, sysId):
self.colNum = colNum
self.lineNum = lineNum
self.pubId = pubId
self.sysId = sysId
def getColumnNumber(self):
return self.colNum
def getLineNumber(self):
return self.lineNum
def getPublicId(self):
return self.pubId
def getSystemId(self):
return self.sysId
# --- JavaSAXParser
class JavaSAXParser(xmlreader.XMLReader, javasax.ContentHandler, LexicalHandler):
"SAX driver for the Java SAX parsers."
def __init__(self, jdriver = None):
xmlreader.XMLReader.__init__(self)
self._parser = create_java_parser(jdriver)
self._parser.setFeature(feature_namespaces, 0)
self._parser.setFeature(feature_namespace_prefixes, 0)
self._parser.setContentHandler(self)
self._nsattrs = AttributesNSImpl()
self._attrs = AttributesImpl()
self.setEntityResolver(self.getEntityResolver())
self.setErrorHandler(self.getErrorHandler())
self.setDTDHandler(self.getDTDHandler())
try:
self._parser.setProperty("http://xml.org/sax/properties/lexical-handler", self)
        except Exception, x:
            # ignore: this parser does not support the lexical-handler property
            pass
# XMLReader methods
def parse(self, source):
"Parse an XML document from a URL or an InputSource."
self._parser.parse(JyInputSourceWrapper(source))
def getFeature(self, name):
return self._parser.getFeature(name)
def setFeature(self, name, state):
self._parser.setFeature(name, state)
def getProperty(self, name):
return self._parser.getProperty(name)
def setProperty(self, name, value):
self._parser.setProperty(name, value)
def setEntityResolver(self, resolver):
self._parser.entityResolver = JyEntityResolverWrapper(resolver)
xmlreader.XMLReader.setEntityResolver(self, resolver)
def setErrorHandler(self, err_handler):
self._parser.errorHandler = JyErrorHandlerWrapper(err_handler)
xmlreader.XMLReader.setErrorHandler(self, err_handler)
def setDTDHandler(self, dtd_handler):
self._parser.setDTDHandler(JyDTDHandlerWrapper(dtd_handler))
xmlreader.XMLReader.setDTDHandler(self, dtd_handler)
# ContentHandler methods
def setDocumentLocator(self, locator):
self._cont_handler.setDocumentLocator(locator)
def startDocument(self):
self._cont_handler.startDocument()
self._namespaces = self._parser.getFeature(feature_namespaces)
def startElement(self, uri, lname, qname, attrs):
if self._namespaces:
self._nsattrs._attrs = attrs
self._cont_handler.startElementNS((uri or None, lname), qname,
self._nsattrs)
else:
self._attrs._attrs = attrs
self._cont_handler.startElement(qname, self._attrs)
def startPrefixMapping(self, prefix, uri):
self._cont_handler.startPrefixMapping(prefix, uri)
def characters(self, char, start, len):
self._cont_handler.characters(unicode(String(char, start, len)))
def ignorableWhitespace(self, char, start, len):
self._cont_handler.ignorableWhitespace(unicode(String(char, start,
len)))
def endElement(self, uri, lname, qname):
if self._namespaces:
self._cont_handler.endElementNS((uri or None, lname), qname)
else:
self._cont_handler.endElement(qname)
def endPrefixMapping(self, prefix):
self._cont_handler.endPrefixMapping(prefix)
def endDocument(self):
self._cont_handler.endDocument()
def processingInstruction(self, target, data):
self._cont_handler.processingInstruction(target, data)
# Lexical handler methods
def comment(self, char, start, len):
try:
# Need to wrap this in a try..except in case the parser does not support lexical events
self._cont_handler.comment(unicode(String(char, start, len)))
except:
pass
def startCDATA(self):
pass # TODO
def endCDATA(self):
pass # TODO
def startDTD(self, name, publicId, systemId):
pass # TODO
def endDTD(self):
pass # TODO
def startEntity(self, name):
pass # TODO
def endEntity(self, name):
pass # TODO
def _fixTuple(nsTuple, frm, to):
if isinstance(nsTuple, tuple) and len(nsTuple) == 2:
nsUri, localName = nsTuple
if nsUri == frm:
nsUri = to
return (nsUri, localName)
return nsTuple
def _makeJavaNsTuple(nsTuple):
return _fixTuple(nsTuple, None, '')
def _makePythonNsTuple(nsTuple):
return _fixTuple(nsTuple, '', None)
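# e.g. _makeJavaNsTuple((None, 'tag')) -> ('', 'tag') and
# _makePythonNsTuple(('', 'tag')) -> (None, 'tag'); plain qname strings are
# passed through unchanged.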
class AttributesImpl:
def __init__(self, attrs = None):
self._attrs = attrs
def getLength(self):
return self._attrs.getLength()
def getType(self, name):
return self._attrs.getType(_makeJavaNsTuple(name))
def getValue(self, name):
value = self._attrs.getValue(_makeJavaNsTuple(name))
if value == None:
raise KeyError(name)
return value
def getNames(self):
return [_makePythonNsTuple(self._attrs.getQName(index)) for index in range(len(self))]
def getQNames(self):
return [self._attrs.getQName(index) for index in range(len(self))]
def getValueByQName(self, qname):
idx = self._attrs.getIndex(qname)
if idx == -1:
raise KeyError, qname
return self._attrs.getValue(idx)
def getNameByQName(self, qname):
idx = self._attrs.getIndex(qname)
if idx == -1:
raise KeyError, qname
return qname
def getQNameByName(self, name):
idx = self._attrs.getIndex(_makeJavaNsTuple(name))
if idx == -1:
raise KeyError, name
return name
def __len__(self):
return self._attrs.getLength()
def __getitem__(self, name):
return self.getValue(name)
def keys(self):
return self.getNames()
def copy(self):
return self.__class__(self._attrs)
def items(self):
return [(name, self[name]) for name in self.getNames()]
def values(self):
return map(self.getValue, self.getNames())
def get(self, name, alt=None):
try:
return self.getValue(name)
except KeyError:
return alt
def has_key(self, name):
try:
self.getValue(name)
return True
except KeyError:
return False
# --- AttributesNSImpl
class AttributesNSImpl(AttributesImpl):
def __init__(self, attrs=None):
AttributesImpl.__init__(self, attrs)
def getType(self, name):
name = _makeJavaNsTuple(name)
return self._attrs.getType(name[0], name[1])
def getValue(self, name):
jname = _makeJavaNsTuple(name)
value = self._attrs.getValue(jname[0], jname[1])
if value == None:
raise KeyError(name)
return value
def getNames(self):
names = []
for idx in range(len(self)):
names.append(_makePythonNsTuple( (self._attrs.getURI(idx), self._attrs.getLocalName(idx)) ))
return names
def getNameByQName(self, qname):
idx = self._attrs.getIndex(qname)
if idx == -1:
raise KeyError, qname
return _makePythonNsTuple( (self._attrs.getURI(idx), self._attrs.getLocalName(idx)) )
def getQNameByName(self, name):
name = _makeJavaNsTuple(name)
idx = self._attrs.getIndex(name[0], name[1])
if idx == -1:
raise KeyError, name
return self._attrs.getQName(idx)
def getQNames(self):
return [self._attrs.getQName(idx) for idx in range(len(self))]
# ---
def create_java_parser(jdriver = None):
try:
if jdriver:
return XMLReaderFactory.createXMLReader(jdriver)
elif jaxp:
return factory.newSAXParser().getXMLReader()
else:
return XMLReaderFactory.createXMLReader()
except ParserConfigurationException, e:
raise _exceptions.SAXReaderNotAvailable(e.getMessage())
except javasax.SAXException, e:
raise _exceptions.SAXReaderNotAvailable(e.getMessage())
def create_parser(jdriver = None):
return JavaSAXParser(jdriver)
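# Minimal usage sketch (assumes a Jython runtime with a SAX parser on the
# classpath; the Echo handler and file name below are illustrative only):
#
#   from xml.sax import handler
#   class Echo(handler.ContentHandler):
#       def startElement(self, name, attrs):
#           print "<%s>" % name
#   p = create_parser()
#   p.setContentHandler(Echo())
#   p.parse("document.xml")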
| {
"content_hash": "53f9fa63ba7d50c26973a4ba6e3b8de4",
"timestamp": "",
"source": "github",
"line_count": 382,
"max_line_length": 104,
"avg_line_length": 31.664921465968586,
"alnum_prop": 0.624917328042328,
"repo_name": "isandlaTech/cohorte-devtools",
"id": "ec85c6fffeb641ce092da3eac68fc05ec936b8d1",
"size": "12096",
"binary": false,
"copies": "23",
"ref": "refs/heads/master",
"path": "org.cohorte.eclipse.runner.basic/files/jython/Lib/xml/sax/drivers2/drv_javasax.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "151318"
},
{
"name": "HTML",
"bytes": "113064"
},
{
"name": "Java",
"bytes": "172793"
},
{
"name": "JavaScript",
"bytes": "2165497"
},
{
"name": "Python",
"bytes": "13926564"
},
{
"name": "Shell",
"bytes": "1490"
}
],
"symlink_target": ""
} |
import unittest
import parse
__author__ = 'Ambareesh Revanur'
class TestStringMethods(unittest.TestCase):
def test_if(self):
lex_outputfile='tc/if.txt'
self.assertEqual('accept', parse.parsefile(lex_outputfile))
def test_ifel(self):
lex_outputfile='tc/ifel.txt'
self.assertEqual('accept', parse.parsefile(lex_outputfile))
def test_for(self):
lex_outputfile='tc/for.txt'
self.assertEqual('accept', parse.parsefile(lex_outputfile))
    def test_expr(self):
        lex_outputfile='tc/expr.txt'
        self.assertEqual('accept', parse.parsefile(lex_outputfile))
if __name__ == '__main__':
unittest.main() | {
"content_hash": "a2386e29b4852f222437a06f949ea5fe",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 71,
"avg_line_length": 25.11111111111111,
"alnum_prop": 0.6489675516224189,
"repo_name": "revanurambareesh/simpcppparse",
"id": "d8769c1e03b13e2676e7d916fc8a4aba9f1fc79b",
"size": "678",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C++",
"bytes": "389"
},
{
"name": "Lex",
"bytes": "2541"
},
{
"name": "Python",
"bytes": "6721"
},
{
"name": "Shell",
"bytes": "122"
}
],
"symlink_target": ""
} |
"""
__graph_MT_post__Model.py___________________________________________________________
Automatically generated graphical appearance ---> MODIFY DIRECTLY WITH CAUTION
___________________________________________________________________________
"""
import tkFont
from graphEntity import *
from GraphicalForm import *
from ATOM3Constraint import *
class graph_MT_post__Model(graphEntity):
def __init__(self, x, y, semObject = None):
self.semanticObject = semObject
self.sizeX, self.sizeY = 172, 82
graphEntity.__init__(self, x, y)
self.ChangesAtRunTime = 0
self.constraintList = []
if self.semanticObject: atribs = self.semanticObject.attributesToDraw()
else: atribs = None
self.graphForms = []
self.imageDict = self.getImageDict()
def DrawObject(self, drawing, showGG = 0):
self.dc = drawing
if showGG and self.semanticObject: self.drawGGLabel(drawing)
h = drawing.create_oval(self.translate([189.0, 62.0, 189.0, 62.0]), tags = (self.tag, 'connector'), outline = '', fill = '' )
self.connectors.append( h )
h = drawing.create_rectangle(self.translate([20.0, 20.0, 190.0, 100.0]), tags = self.tag, stipple = '', width = 1, outline = 'black', fill = 'moccasin')
self.gf4 = GraphicalForm(drawing, h, "gf4")
self.graphForms.append(self.gf4)
font = tkFont.Font( family='Arial', size=12, weight='normal', slant='roman', underline=0)
h = drawing.create_text(self.translate([110.0, 41.0, 110.0, 12.0])[:2], tags = self.tag, font=font, fill = 'black', anchor = 'center', text = 'MT_post__Model_S', width = '0', justify= 'left', stipple='' )
self.gf66 = GraphicalForm(drawing, h, 'gf66', fontObject=font)
self.graphForms.append(self.gf66)
helv12 = tkFont.Font ( family="Helvetica", size=12, weight="bold" )
h = drawing.create_text(self.translate([-3, -3]), font=helv12,
tags = (self.tag, self.semanticObject.getClass()),
fill = "black",
text=self.semanticObject.MT_label__.toString())
self.attr_display["MT_label__"] = h
self.gf_label = GraphicalForm(drawing, h, 'gf_label', fontObject=helv12)
self.graphForms.append(self.gf_label)
def postCondition( self, actionID, * params):
return None
def preCondition( self, actionID, * params):
return None
def getImageDict( self ):
imageDict = dict()
return imageDict
new_class = graph_MT_post__Model
| {
"content_hash": "4aad5a5c7075064ffe240aa611cfd670",
"timestamp": "",
"source": "github",
"line_count": 63,
"max_line_length": 212,
"avg_line_length": 41.42857142857143,
"alnum_prop": 0.5754789272030651,
"repo_name": "levilucio/SyVOLT",
"id": "3f264f3c35aea6264d6efa85f991b713f54237a9",
"size": "2610",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "UMLRT2Kiltera_MM/graph_MT_post__Model.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "166159"
},
{
"name": "Python",
"bytes": "34207588"
},
{
"name": "Shell",
"bytes": "1118"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('opconsole', '0010_auto_20170411_1949'),
]
operations = [
migrations.RenameField(
model_name='zones',
old_name='colors',
new_name='color',
),
]
| {
"content_hash": "86389b13caed00df7538454149b4fea7",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 49,
"avg_line_length": 19.72222222222222,
"alnum_prop": 0.571830985915493,
"repo_name": "baalkor/timetracking",
"id": "729e9e52a819abaa8828cb5d9d4d83d2b82fc721",
"size": "428",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "opconsole/migrations/0011_auto_20170411_1950.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "2854"
},
{
"name": "HTML",
"bytes": "46860"
},
{
"name": "JavaScript",
"bytes": "17219"
},
{
"name": "Python",
"bytes": "98024"
}
],
"symlink_target": ""
} |
"""
tally settings, FOR TESTING PURPOSES ONLY
(this file is not included in the distribution)
"""
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
ALLOWED_HOSTS = ['*']
SECRET_KEY = 'tally_tests_are_not_secret'
INSTALLED_APPS = (
'tally',
)
ROOT_URLCONF = 'tally.urls'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Required
TALLY_DATA_DIR = os.path.join(BASE_DIR, 'data')
| {
"content_hash": "69e7ac5bac58b71b12170dc8a59b2d53",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 53,
"avg_line_length": 17.10810810810811,
"alnum_prop": 0.6445497630331753,
"repo_name": "dcwatson/tally",
"id": "f602d86fe486c547ea27a1694590a1e1953458e1",
"size": "633",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tally/settings.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1824"
},
{
"name": "JavaScript",
"bytes": "4662"
},
{
"name": "Python",
"bytes": "30905"
},
{
"name": "Shell",
"bytes": "6699"
}
],
"symlink_target": ""
} |
def recordAllZeros(conservationFile, numBases):
# Record a row of zeros
for i in range(numBases):
# Record a 0 for each base
conservationFile.write("0" + "\t")
conservationFile.write("\n")
def processPhastconsHeader(PhastconsLineElements):
# Process a header in a Phastcons file
PhastconsChromInfo = PhastconsLineElements[1].split("=")
PhastconsChrom = PhastconsChromInfo[1]
PhastconsStartInfo = PhastconsLineElements[2].split("=")
return [PhastconsChrom, PhastconsStartInfo]
def recordScores(conservationFile, PhastconsScores):
# Record the per-base Phastcons scores to the output file
for score in PhastconsScores:
# Iterate through the scores and record each to the output file
conservationFile.write(str(score) + "\t")
conservationFile.write("\n")
def checkIfAtEnd(PhastconsLineElements, conservationFile, numBases):
# Check if the current line is the end of the file
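    # NOTE: conservationFile and numBases are accepted to mirror the callers'
    # signatures but are not used inside this function.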
if len(PhastconsLineElements) == 0:
# There is no more conservation information, so do not record any conservation information for the region
endReached = True
stopReached = True
return [endReached, stopReached]
else:
# There is more conservation information
return [False, False]
def updatePhastConsInfo(PhastconsFile, conservationFile, PhastconsLineElements, PhastconsChrom, PhastconsIndex, lastPhastconsScores, chrom):
# Update the location of the current conservation information based on the Phastcons header
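    # NOTE: numBases is not a parameter here; it is read from the module-level
    # global that the __main__ block assigns before calling getPerBaseConservation.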
endReached = False
stopReached = False
while len(PhastconsLineElements) > 1:
# At a new starting index for conservation scores
[PhastconsChrom, PhastconsStartInfo] = processPhastconsHeader(PhastconsLineElements)
if PhastconsChrom > chrom:
# There is no more conservation information for the rest of the region, so do not compute its conservation
lastPhastconsScores = np.zeros(numBases)
stopReached = True
break
if int(PhastconsStartInfo[1]) != PhastconsIndex:
# Modify PhastconsIndex appropriately
PhastconsIndex = int(PhastconsStartInfo[1])
PhastconsLineElements = PhastconsFile.readline().strip().split(" ")
[endReached, stopReached] = checkIfAtEnd(PhastconsLineElements, conservationFile, numBases)
if stopReached:
# There is no more conservation information for the rest of the region, so stop
break
return [PhastconsLineElements, PhastconsChrom, PhastconsIndex, lastPhastconsScores, endReached, stopReached]
def getNextScore(PhastconsFile, conservationFile, PhastconsChrom, PhastconsIndex, lastPhastconsScores, chrom):
# Get the next conservation score
endReached = False
stopReached = False
PhastconsLineElements = PhastconsFile.readline().strip().split(" ")
if len(PhastconsLineElements) == 1:
# At a conservation score
PhastconsIndex = PhastconsIndex + 1
else:
# Get the header information or stop if there is no header
[endReached, stopReached] = checkIfAtEnd(PhastconsLineElements, conservationFile, numBases)
[PhastconsLineElements, PhastconsChrom, PhastconsIndex, lastPhastconsScores, endReached, stopReached] = updatePhastConsInfo(PhastconsFile, conservationFile, PhastconsLineElements, PhastconsChrom, PhastconsIndex, lastPhastconsScores, chrom)
return [PhastconsLineElements, PhastconsChrom, PhastconsIndex, lastPhastconsScores, endReached, stopReached]
def getPerBaseConservation(bedFileName, PhastconsFileName, conservationFileName, numBases):
# Gets the conservation score for every base of of each region
# ASSUMES THAT REGIONS ARE SORTED BY CHROM, START, END
bedFile = open(bedFileName)
PhastconsFile = gzip.open(PhastconsFileName)
conservationFile = open(conservationFileName, 'w+')
PhastconsIndex = 0
PhastconsChrom = "chr0"
lastPhastconsScores = np.zeros(numBases)
lastChrom = "chr0"
lastEnd = 0
endReached = False
for line in bedFile:
# Iterate through regions and find the average conservation score for each
        lineElements = line.strip().split("\t")
stopReached = False
PhastconsScores = np.zeros(numBases) # Per-base Phastcons scores for each base in the current region
if endReached == True:
# The conservation information for this region cannot be obtained, so record 0s and continue
recordAllZeros(conservationFile, numBases)
continue
chrom = lineElements[0]
if chrom != lastChrom:
print chrom
start = int(lineElements[1])
end = int(lineElements[2])
if (lastEnd > start) and (lastChrom == chrom):
# The previous peak overlaps with the current peak, so store the previous scores that overlap in the current array
PhastconsScores[0:lastEnd - start] = lastPhastconsScores[numBases - (lastEnd - start):numBases]
lastChrom = chrom
lastEnd = end
while chrom > PhastconsChrom:
# At a new chromosome, so read through Phastcons file until the next chromosome is reached
newChromReached = False
while newChromReached == False:
# Have not reached a new chromosome, so keep reading the Phastcons file
PhastconsLineElements = PhastconsFile.readline().strip().split(" ")
[endReached, stopReached] = checkIfAtEnd(PhastconsLineElements, conservationFile, numBases)
lastPhastconsChrom = PhastconsChrom
[PhastconsLineElements, PhastconsChrom, PhastconsIndex, lastPhastconsScores, endReached, stopReached] = updatePhastConsInfo(PhastconsFile, conservationFile, PhastconsLineElements, PhastconsChrom, PhastconsIndex, lastPhastconsScores, chrom)
if PhastconsChrom != lastPhastconsChrom:
# At a new chromosome
newChromReached = True
if stopReached:
# There is no more conservation information for the rest of the region, so stop
break
if stopReached == True:
# The conservation information for this region cannot be obtained, so continue
break
if stopReached == True:
# The conservation information for this region cannot be obtained, so continue
recordAllZeros(conservationFile, numBases)
continue
if PhastconsChrom > chrom:
# Region conservation information is not in Phastcons file because a new chromosome has been reached in the Phastcons file
recordAllZeros(conservationFile, numBases)
lastPhastconsScores = np.zeros(numBases)
continue
while PhastconsIndex < start:
# Go through bases until the start is reached
[PhastconsLineElements, PhastconsChrom, PhastconsIndex, lastPhastconsScores, endReached, stopReached] = getNextScore(PhastconsFile, conservationFile, PhastconsChrom, PhastconsIndex, lastPhastconsScores, chrom)
if stopReached:
# There is no more conservation information for the rest of the region, so stop
break
if stopReached == True:
# The conservation information for this region cannot be obtained, so continue
recordAllZeros(conservationFile, numBases)
continue
if PhastconsIndex >= end:
# The conservation score is after the region and there is no sufficiently early conservation score
recordAllZeros(conservationFile, numBases)
lastPhastconsScores = np.zeros(numBases)
continue
PhastconsScores[PhastconsIndex-start] = float(PhastconsLineElements[0])
while PhastconsIndex < end:
# Gets conservation scores of every base in the region
[PhastconsLineElements, PhastconsChrom, PhastconsIndex, lastPhastconsScores, endReached, stopReached] = getNextScore(PhastconsFile, conservationFile, PhastconsChrom, PhastconsIndex, lastPhastconsScores, chrom)
if (stopReached) or (PhastconsIndex >= end):
# There is no more conservation information for the rest of the region, so stop
break
PhastconsScores[PhastconsIndex-start] = float(PhastconsLineElements[0])
recordScores(conservationFile, PhastconsScores)
lastPhastconsScores = PhastconsScores
bedFile.close()
PhastconsFile.close()
conservationFile.close()
if __name__=="__main__":
import sys
import gzip
import numpy as np
bedFileName = sys.argv[1]
PhastconsFileName = sys.argv[2]
conservationFileName = sys.argv[3]
numBases = int(sys.argv[4])
getPerBaseConservation(bedFileName, PhastconsFileName, conservationFileName, numBases)
| {
"content_hash": "581f03eacb1f3645ee09f15b053524a0",
"timestamp": "",
"source": "github",
"line_count": 164,
"max_line_length": 243,
"avg_line_length": 48.55487804878049,
"alnum_prop": 0.7782242873288961,
"repo_name": "imk1/IMKTFBindingCode",
"id": "8cb67523fbb7ebfe5241d9fa72028266a8ec1695",
"size": "7963",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "getPerBaseConservation.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1156919"
},
{
"name": "R",
"bytes": "22835"
},
{
"name": "Shell",
"bytes": "416606"
}
],
"symlink_target": ""
} |
houses = ["Stark", "Lannister", "Baratheon", "Greyjoy"]
seats = ["Winterfell", "Casterly Rock", "Storm's End", "Pyke"]
sigils = ["A Gray Direwolf", "A Golden Lion",
"A Crowned Black Stag", "A Golden Kraken"]
words = ["Winter is coming", "Hear me roar !",
"Our is the fury !", "We do not sow"]
print(f"houses : {houses}")
print(f"seats : {seats}")
print(f"sigils : {sigils}")
print(f"words : {words}")
print("\n###########\n")
print(
    f"zip(houses, seats, sigils, words) as a list: \n{list(zip(houses, seats, sigils, words))}")
print("\nunpacking de zip(houses, seats, sigils, words) :\n",
*zip(houses, seats, sigils, words))
print("\n###########\n")
print("Affichage de chacun des éléments de la séquence zip(houses, seats, sigils, words) via un for :")
for got_house_info in zip(houses, seats, sigils, words):
print(got_house_info)
print("\n###########\n")
print("Utilisation dans une compréhension")
print("\n".join([f"{house} : {word}" for house, _,
_, word in zip(houses, seats, sigils, words)]))
print("\n###########\n")
print("Réalisation d'un 'unzip' et retour à la situation initiale")
print("zip(*zip(houses, seats, sigils, words)) comme liste : ",
list(zip(*zip(houses, seats, sigils, words))))
print("\nunpacking de zip(*zip(houses, seats, sigils, words)) :")
houses_2, seats_2, sigils_2, words_2 = zip(*zip(houses, seats, sigils, words))
print(f"=> houses_2 : {houses_2}")
print(f"=> seats_2 : {seats_2}")
print(f"=> sigils_2 : {sigils_2}")
print(f"=> seats_2 : {words_2}")
| {
"content_hash": "26de64ee44eafa47b8a019217e7b07d3",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 107,
"avg_line_length": 34.75555555555555,
"alnum_prop": 0.6138107416879796,
"repo_name": "TGITS/programming-workouts",
"id": "90e310c16407bac1e6b347e7502329cfd878417c",
"size": "1570",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/misc/learning_python/zip_examples_with_got.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "244"
},
{
"name": "C#",
"bytes": "175"
},
{
"name": "CSS",
"bytes": "57544"
},
{
"name": "Clojure",
"bytes": "145363"
},
{
"name": "D",
"bytes": "5141"
},
{
"name": "Dart",
"bytes": "80832"
},
{
"name": "Dockerfile",
"bytes": "811"
},
{
"name": "Elixir",
"bytes": "86418"
},
{
"name": "Elm",
"bytes": "2738"
},
{
"name": "F#",
"bytes": "4142"
},
{
"name": "Gherkin",
"bytes": "503"
},
{
"name": "Gnuplot",
"bytes": "2363"
},
{
"name": "Go",
"bytes": "65562"
},
{
"name": "Groovy",
"bytes": "2457"
},
{
"name": "HTML",
"bytes": "1536579"
},
{
"name": "Haskell",
"bytes": "157"
},
{
"name": "Java",
"bytes": "744052"
},
{
"name": "JavaScript",
"bytes": "79838"
},
{
"name": "Jinja",
"bytes": "362"
},
{
"name": "Julia",
"bytes": "1365"
},
{
"name": "Kotlin",
"bytes": "53565"
},
{
"name": "Lua",
"bytes": "3980"
},
{
"name": "PHP",
"bytes": "264599"
},
{
"name": "Pascal",
"bytes": "2952"
},
{
"name": "Perl",
"bytes": "927"
},
{
"name": "PowerShell",
"bytes": "397"
},
{
"name": "Prolog",
"bytes": "574"
},
{
"name": "Pug",
"bytes": "550"
},
{
"name": "Python",
"bytes": "550192"
},
{
"name": "R",
"bytes": "19071"
},
{
"name": "Raku",
"bytes": "5189"
},
{
"name": "Ruby",
"bytes": "27911"
},
{
"name": "Rust",
"bytes": "71504"
},
{
"name": "Scala",
"bytes": "136475"
},
{
"name": "Shell",
"bytes": "9158"
},
{
"name": "TypeScript",
"bytes": "64644"
}
],
"symlink_target": ""
} |
import copy
from essential.config import cfg
database_opts = [
cfg.StrOpt('sqlite_db',
deprecated_group='DEFAULT',
default='essential.sqlite',
help='The file name to use with SQLite'),
cfg.BoolOpt('sqlite_synchronous',
deprecated_group='DEFAULT',
default=True,
help='If True, SQLite uses synchronous mode'),
cfg.StrOpt('backend',
default='sqlalchemy',
deprecated_name='db_backend',
deprecated_group='DEFAULT',
help='The backend to use for db'),
cfg.StrOpt('connection',
help='The SQLAlchemy connection string used to connect to the '
'database',
secret=True,
deprecated_opts=[cfg.DeprecatedOpt('sql_connection',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_connection',
group='DATABASE'),
cfg.DeprecatedOpt('connection',
group='sql'), ]),
cfg.StrOpt('mysql_sql_mode',
default='TRADITIONAL',
help='The SQL mode to be used for MySQL sessions. '
'This option, including the default, overrides any '
'server-set SQL mode. To use whatever SQL mode '
'is set by the server configuration, '
'set this to no value. Example: mysql_sql_mode='),
cfg.IntOpt('idle_timeout',
default=3600,
deprecated_opts=[cfg.DeprecatedOpt('sql_idle_timeout',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_idle_timeout',
group='DATABASE'),
cfg.DeprecatedOpt('idle_timeout',
group='sql')],
help='Timeout before idle sql connections are reaped'),
cfg.IntOpt('min_pool_size',
default=1,
deprecated_opts=[cfg.DeprecatedOpt('sql_min_pool_size',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_min_pool_size',
group='DATABASE')],
help='Minimum number of SQL connections to keep open in a '
'pool'),
cfg.IntOpt('max_pool_size',
default=None,
deprecated_opts=[cfg.DeprecatedOpt('sql_max_pool_size',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_max_pool_size',
group='DATABASE')],
help='Maximum number of SQL connections to keep open in a '
'pool'),
cfg.IntOpt('max_retries',
default=10,
deprecated_opts=[cfg.DeprecatedOpt('sql_max_retries',
group='DEFAULT'),
cfg.DeprecatedOpt('sql_max_retries',
group='DATABASE')],
help='Maximum db connection retries during startup. '
'(setting -1 implies an infinite retry count)'),
cfg.IntOpt('retry_interval',
default=10,
deprecated_opts=[cfg.DeprecatedOpt('sql_retry_interval',
group='DEFAULT'),
cfg.DeprecatedOpt('reconnect_interval',
group='DATABASE')],
help='Interval between retries of opening a sql connection'),
cfg.IntOpt('max_overflow',
default=None,
deprecated_opts=[cfg.DeprecatedOpt('sql_max_overflow',
group='DEFAULT'),
cfg.DeprecatedOpt('sqlalchemy_max_overflow',
group='DATABASE')],
help='If set, use this value for max_overflow with sqlalchemy'),
cfg.IntOpt('connection_debug',
default=0,
deprecated_opts=[cfg.DeprecatedOpt('sql_connection_debug',
group='DEFAULT')],
help='Verbosity of SQL debugging information. 0=None, '
'100=Everything'),
cfg.BoolOpt('connection_trace',
default=False,
deprecated_opts=[cfg.DeprecatedOpt('sql_connection_trace',
group='DEFAULT')],
help='Add python stack traces to SQL as comment strings'),
cfg.IntOpt('pool_timeout',
default=None,
deprecated_opts=[cfg.DeprecatedOpt('sqlalchemy_pool_timeout',
group='DATABASE')],
help='If set, use this value for pool_timeout with sqlalchemy'),
cfg.BoolOpt('use_db_reconnect',
default=False,
help='Enable the experimental use of database reconnect '
'on connection lost'),
cfg.IntOpt('db_retry_interval',
default=1,
help='seconds between db connection retries'),
cfg.BoolOpt('db_inc_retry_interval',
default=True,
help='Whether to increase interval between db connection '
'retries, up to db_max_retry_interval'),
cfg.IntOpt('db_max_retry_interval',
default=10,
help='max seconds between db connection retries, if '
'db_inc_retry_interval is enabled'),
cfg.IntOpt('db_max_retries',
default=20,
help='maximum db connection retries before error is raised. '
'(setting -1 implies an infinite retry count)'),
]
CONF = cfg.CONF
CONF.register_opts(database_opts, 'database')
def set_defaults(sql_connection, sqlite_db, max_pool_size=None,
max_overflow=None, pool_timeout=None):
"""Set defaults for configuration variables."""
cfg.set_defaults(database_opts,
connection=sql_connection,
sqlite_db=sqlite_db)
# Update the QueuePool defaults
if max_pool_size is not None:
cfg.set_defaults(database_opts,
max_pool_size=max_pool_size)
if max_overflow is not None:
cfg.set_defaults(database_opts,
max_overflow=max_overflow)
if pool_timeout is not None:
cfg.set_defaults(database_opts,
pool_timeout=pool_timeout)
def list_opts():
"""Returns a list of essential.config options available in the library.
The returned list includes all essential.config options which may be
registered at runtime by the library.
Each element of the list is a tuple. The first element is the name of the
group under which the list of elements in the second element will be
registered. A group name of None corresponds to the [DEFAULT] group in
config files.
The purpose of this is to allow tools like the Essential sample config
file generator to discover the options exposed to users by this library.
:returns: a list of (group_name, opts) tuples
"""
return [('database', copy.deepcopy(database_opts))]
| {
"content_hash": "d495ab7e9ed95a3db3630a6ce57a24cc",
"timestamp": "",
"source": "github",
"line_count": 159,
"max_line_length": 79,
"avg_line_length": 48.075471698113205,
"alnum_prop": 0.5024856096284668,
"repo_name": "gaolichuang/py-essential",
"id": "d0f2d175633008f6daadd26f47402e7ad364c21b",
"size": "8199",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "essential/db/options.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Perl",
"bytes": "814"
},
{
"name": "Python",
"bytes": "1440964"
}
],
"symlink_target": ""
} |
import sys
from select import select
import time
import math
#sys.path.append("./F310_Gamepad_Parser/core")
from F310_Gamepad_Parser.core.bus import *
from F310_Gamepad_Parser.core.parser_core import *
from logger import Logger
from robotServer import RobotServer
class RemoteControl:
# Create bus object
bus = Bus()
# Create a dictionary to be used to keep states from joy_core
states = { 'A':0, 'B':0, 'X':0, 'Y':0, \
'Back':0, 'Start':0, 'Middle':0, \
'Left':0, 'Right':0, 'Up':0, 'Down':0, \
'LB':0, 'RB':0, 'LT':0, 'RT':0, \
'LJ/Button':0, 'RJ/Button':0, \
'LJ/Left':0, 'LJ/Right':0, 'LJ/Up':0, 'LJ/Down':0, \
'RJ/Left':0, 'RJ/Right':0, 'RJ/Up':0, 'RJ/Down':0, \
'Byte0':0, 'Byte1':0, 'Byte2':0, 'Byte3':0, \
'Byte4':0, 'Byte5':0, 'Byte6':0, 'Byte7':0, \
'Byte0/INT':0, 'Byte1/INT':0, 'Byte2/INT':0, \
'Byte3/INT':0, 'Byte4/INT':0, 'Byte5/INT':0, \
'Byte6/INT':0, 'Byte7/INT':0}
checkController = True
def __init__(self):
self.robotServer = RobotServer()
#self.robotServer.getConnection()
self.parsercore = ParserCore(self.bus, self.states, self.checkController)
self.logger = Logger("robotImagesLog.txt")
self.logger.writeStartOfSequence()
def _scalePower(self, power, lowLim, upLim):
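        # Map |power| from the joystick range [lowLim, 120] linearly onto the
        # motor range [lowLim, upLim], preserving sign; inputs above 120 are
        # clamped to upLim and 0 stays 0.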
if(power == 0):
return 0
elif(abs(power) > 120):
return math.copysign(upLim, power)
else:
per = (float(abs(power) - lowLim)/float(120 - lowLim))
            scalp = per * float(upLim - lowLim) + lowLim
return math.copysign(scalp, power)
def scaleMotorPower(self, left, right):
scaledLeft = self._scalePower(left, 10, 40)
scaledRight = self._scalePower(right, 10, 40)
return scaledLeft, scaledRight
def scalePowerForLog(self, left, right):
logLeft = float(left)/40.0
logRight = float(right)/40.0
return logLeft, logRight
def main(self):
timeout = 0.005
self.robotServer.getConnection()
print("robotConnected")
self.parsercore.start()
print("controller started")
self.quit = False
self.robotServer.readMessage()
self.robotServer.sendStart()
print('Enter Q to quit: ')
starttime = time.time()
loops = 0
while(self.quit == False):
fname = self.robotServer.readImage()
rlist, _, _ = select([sys.stdin], [], [], timeout)
if rlist:
print("input received")
s = sys.stdin.readline()
if(s == 'Q'):
print("Q detected. Quiting..")
self.quit = True
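                    # NOTE: ParserCore received checkController's *value* at
                    # construction, so flipping this attribute likely has no
                    # effect on the already-running parser thread itself.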
self.checkController = False
self.parsercore.join()
else:
print("Not 'Q'\nEnter Q to quit:")
leftPower = self.states['LJ/Up'] + self.states['LJ/Down']
rightPower = self.states['RJ/Up'] + self.states['RJ/Down']
leftPower, rightPower = self.scaleMotorPower(leftPower, rightPower)
logLeft , logRight = self.scalePowerForLog(leftPower, rightPower)
self.logger.log(fname + " " + str(logLeft) + " " + str(logRight))
#print(leftPower, rightPower)
sent = self.robotServer.sendAction(int(leftPower), int(rightPower))
loops += 1
nowtime = time.time()
fps = float(loops) / (nowtime - starttime)
print("FPS: " + str(fps) + " Number of frames: " + str(loops) + " total time: " + str(nowtime-starttime))
self.robotServer.close()
if __name__ == "__main__":
RemoteControl().main()
| {
"content_hash": "67ccf1c39ca19cd587c4fe14253b18e8",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 109,
"avg_line_length": 33.74226804123711,
"alnum_prop": 0.6324472960586618,
"repo_name": "ScottJordan/TangoRobot",
"id": "57cbf0ada8143b5ff269688ef0c59d047ac21491",
"size": "3273",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "DesktopLearner/RemoteVideoControl.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Arduino",
"bytes": "1348"
},
{
"name": "C",
"bytes": "202914"
},
{
"name": "C++",
"bytes": "2865954"
},
{
"name": "CMake",
"bytes": "182598"
},
{
"name": "Java",
"bytes": "2059577"
},
{
"name": "Makefile",
"bytes": "393531"
},
{
"name": "Objective-C",
"bytes": "5343"
},
{
"name": "Python",
"bytes": "44404"
}
],
"symlink_target": ""
} |
import tensorflow as tf
import numpy as np
from tensorflow.contrib.layers import fully_connected, batch_norm, dropout
from tensorflow.examples.tutorials.mnist import input_data
# Set random seed
tf.set_random_seed(123)
np.random.seed(123)
n_inputs = 28 * 28
n_hidden_1 = 100
n_hidden_2 = 100
n_hidden_3 = 100
n_hidden_4 = 100
n_hidden_5 = 100
learning_rate = 0.005
# Use only digits 0 to 4
n_outputs = 5
# Get data and separate digits 0-4 out
mnist = input_data.read_data_sets("/tmp/data/")
X_images, y_images = mnist.train.images, mnist.train.labels
X_images_test, y_images_test = mnist.test.images, mnist.test.labels
# Create 'index' and subset of MNIST
indices = [idx for idx in range(len(y_images)) if y_images[idx] < 5]
X_masked_train = X_images[indices]
y_masked_train = y_images[indices]
# Do same for test set
indices_test = [idx for idx in range(len(y_images_test)) if y_images_test[idx] < 5]
X_test = X_images_test[indices_test]
y_test = y_images_test[indices_test]
validation_metrics = {
"accuracy":
tf.contrib.learn.MetricSpec(
metric_fn=tf.contrib.metrics.streaming_accuracy,
prediction_key=tf.contrib.learn.prediction_key.PredictionKey.CLASSES)}
validation_monitor = tf.contrib.learn.monitors.ValidationMonitor(
x=X_test, y=y_test, early_stopping_rounds=50, metrics=validation_metrics)
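# NOTE: validation_monitor is only consumed by tf.contrib.learn estimators;
# the manual training loop below does not use it.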
# Construct graph
# Use He initalization
he_init = tf.contrib.layers.variance_scaling_initializer()
X = tf.placeholder(tf.float32, shape=(None, n_inputs), name='X')
y = tf.placeholder(tf.int64, shape=(None), name='y')
# Set up necessary variables for batch norm
is_training = tf.placeholder(tf.bool, shape=(), name='Is_Training')
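# 'updates_collections': None makes batch_norm apply its moving-average
# updates in place instead of deferring them to a collection that would
# otherwise have to be run explicitly alongside the train op.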
bn_params = {'is_training': is_training, 'decay': 0.999, 'updates_collections': None}
# Set up drop out regularization
keep_prob = 0.5
with tf.contrib.framework.arg_scope([fully_connected],
normalizer_fn=batch_norm, normalizer_params=bn_params,
weights_initializer=he_init, scope='DNN'):
X_drop = dropout(X, keep_prob, is_training=is_training)
hidden_1 = fully_connected(X_drop, n_hidden_1,
activation_fn=tf.nn.elu, scope='Hidden_1')
hidden_1_drop = dropout(hidden_1, keep_prob, is_training=is_training)
hidden_2 = fully_connected(hidden_1_drop,
n_hidden_2, activation_fn=tf.nn.elu, scope='Hidden_2')
hidden_2_drop = dropout(hidden_2, keep_prob, is_training=is_training)
hidden_3 = fully_connected(hidden_2_drop,
n_hidden_3, activation_fn=tf.nn.elu, scope='Hidden_3')
hidden_3_drop = dropout(hidden_3, keep_prob, is_training=is_training)
hidden_4 = fully_connected(hidden_3_drop,
n_hidden_4, activation_fn=tf.nn.elu, scope='Hidden_4')
hidden_4_drop = dropout(hidden_4, keep_prob, is_training=is_training)
hidden_5 = fully_connected(hidden_4_drop,
n_hidden_5, activation_fn=tf.nn.elu, scope='Hidden_5')
hidden_5_drop = dropout(hidden_5, keep_prob, is_training=is_training)
logits = fully_connected(hidden_5_drop,
n_outputs, activation_fn=None, scope='Outputs')
with tf.name_scope('Loss'):
xentropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=y, logits=logits)
loss = tf.reduce_mean(xentropy, name='Loss')
with tf.name_scope('Train'):
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
training_op = optimizer.minimize(loss)
with tf.name_scope('Eval'):
correct = tf.nn.in_top_k(logits, y, 1)
accuracy = tf.reduce_mean(tf.cast(correct, tf.float32))
init = tf.global_variables_initializer()
# Execution
n_epochs = 1000
batch_size = 200
batches = len(y_masked_train)//batch_size
with tf.Session() as sess:
init.run()
for epoch in range(n_epochs):
for k in range(batches):
X_batch = X_masked_train[k*batch_size:k*batch_size+batch_size]
y_batch = y_masked_train[k*batch_size:k*batch_size+batch_size]
sess.run(training_op, feed_dict={is_training: True, X: X_batch, y: y_batch})
# print('Max logits: ', max_logits.eval(feed_dict={X: X_test}))
# print('Max labels: ', max_labels.eval(feed_dict={y: y_test}))
acc_train = accuracy.eval(feed_dict={is_training: False, X: X_batch, y: y_batch})
acc_test = accuracy.eval(feed_dict={is_training: False, X: X_test, y: y_test})
if epoch % 5 == 0:
print(epoch, "Train accuracy: ", acc_train, "Test accuracy: ", acc_test)
| {
"content_hash": "eb838fb8fe9929760de098e2aa99d1fb",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 89,
"avg_line_length": 38.92035398230089,
"alnum_prop": 0.68781264211005,
"repo_name": "KT12/hands_on_machine_learning",
"id": "f8de62c84ddb339e46b20cbca95da488b6a80c68",
"size": "4533",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "5_layer_dnn.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "2332942"
},
{
"name": "Python",
"bytes": "54890"
}
],
"symlink_target": ""
} |
import director.vtkAll as vtk
import director.thirdparty.numpyjsoncoder as nje
from collections import OrderedDict
from director import fieldcontainer
from director import transformUtils
import json
class ConstraintEncoder(nje.NumpyConvertEncoder):
def default(self, obj):
if isinstance(obj, vtk.vtkTransform):
pos, quat = transformUtils.poseFromTransform(obj)
return OrderedDict(position=pos, quaternion=quat)
elif isinstance(obj, fieldcontainer.FieldContainer):
d = OrderedDict()
d['class'] = type(obj).__name__
for key in obj._fields:
d[key] = getattr(obj, key)
return d
return nje.NumpyConvertEncoder.default(self, obj)
def ConstraintDecoder(dct):
return nje.NumpyDecoder(dct)
def encodeConstraints(dataObj, **kwargs):
return json.dumps(dataObj, cls=ConstraintEncoder, **kwargs)
def decodeConstraints(dataStream):
return json.loads(dataStream, object_hook=ConstraintDecoder)
def getPlanPoses(constraints, ikPlanner):
'''
Given a list of constraints, returns a dictionary of poses containing all
the poses that are references by the constraints by name
'''
poses = sorted([c.postureName for c in constraints if hasattr(c, 'postureName')])
poses = {poseName:list(ikPlanner.jointController.getPose(poseName)) for poseName in poses}
return poses
| {
"content_hash": "3be9c29d355c7818773f2b08e12ed034",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 94,
"avg_line_length": 35.25,
"alnum_prop": 0.7120567375886525,
"repo_name": "mitdrc/director",
"id": "d66379c00768da077fa5a9af3d05b8cec31feea1",
"size": "1410",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "src/python/director/ikconstraintencoder.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "119759"
},
{
"name": "C++",
"bytes": "427846"
},
{
"name": "CMake",
"bytes": "51004"
},
{
"name": "GLSL",
"bytes": "15443"
},
{
"name": "Makefile",
"bytes": "4981"
},
{
"name": "Matlab",
"bytes": "161948"
},
{
"name": "Python",
"bytes": "2123776"
},
{
"name": "Shell",
"bytes": "4236"
}
],
"symlink_target": ""
} |
import hashlib
from contextlib import contextmanager
from io import BytesIO
import pytest
from pex.compatibility import PY2
from pex.http import Context, RequestsContext, StreamFilelike, UrllibContext
from pex.link import Link
from pex.util import named_temporary_file
from pex.variables import Variables
try:
from unittest import mock
except ImportError:
import mock
try:
from responses import RequestsMock
except ImportError:
RequestsMock = None
try:
import requests
except ImportError:
requests = None
BLOB = b'random blob of data'
NO_REQUESTS = 'RequestsMock is None or requests is None'
try:
from httplib import HTTPMessage
except ImportError:
from http.client import HTTPMessage
def make_md5(blob):
md5 = hashlib.md5()
md5.update(blob)
return md5.hexdigest()
@contextmanager
def patch_requests():
requests_mock = RequestsMock()
requests_mock.start()
yield requests_mock
requests_mock.stop()
@contextmanager
def make_url(blob, md5_fragment=None):
url = 'http://pypi.python.org/foo.tar.gz'
if md5_fragment:
url += '#md5=%s' % md5_fragment
with patch_requests() as responses:
responses.add(
responses.GET,
url,
status=200,
body=blob,
content_type='application/x-compressed')
yield url
@pytest.mark.skipif(NO_REQUESTS)
def test_stream_filelike_with_correct_md5():
with make_url(BLOB, make_md5(BLOB)) as url:
request = requests.get(url)
filelike = StreamFilelike(request, Link.wrap(url))
assert filelike.read() == BLOB
@pytest.mark.skipif(NO_REQUESTS)
def test_stream_filelike_with_incorrect_md5():
with make_url(BLOB, 'f' * 32) as url:
request = requests.get(url)
filelike = StreamFilelike(request, Link.wrap(url))
with pytest.raises(Context.Error):
filelike.read()
@pytest.mark.skipif(NO_REQUESTS)
def test_stream_filelike_without_md5():
with make_url(BLOB) as url:
request = requests.get(url)
filelike = StreamFilelike(request, Link.wrap(url))
assert filelike.read() == BLOB
@pytest.mark.skipif(NO_REQUESTS)
def test_requests_context():
context = RequestsContext(verify=False)
with make_url(BLOB, make_md5(BLOB)) as url:
assert context.read(Link.wrap(url)) == BLOB
with make_url(BLOB, make_md5(BLOB)) as url:
filename = context.fetch(Link.wrap(url))
with open(filename, 'rb') as fp:
assert fp.read() == BLOB
# test local reading
with named_temporary_file() as tf:
tf.write(b'goop')
tf.flush()
assert context.read(Link.wrap(tf.name)) == b'goop'
class MockHttpLibResponse(BytesIO):
def __init__(self, data):
BytesIO.__init__(self, data)
self.status = 200
self.version = 'HTTP/1.1'
self.reason = 'OK'
if PY2:
self.msg = HTTPMessage(BytesIO(b'Content-Type: application/x-compressed\r\n'))
else:
self.msg = HTTPMessage()
self.msg.add_header('Content-Type', 'application/x-compressed')
def getheaders(self):
return list(self.msg.items())
def isclosed(self):
return self.closed
@pytest.mark.skipif(NO_REQUESTS)
def test_requests_context_invalid_retries():
env = Variables(environ={'PEX_HTTP_RETRIES': '-1'})
with pytest.raises(ValueError):
RequestsContext(verify=False, env=env)
@pytest.mark.skipif(NO_REQUESTS)
def test_requests_context_retries_from_environment():
retry_count = '42'
env = Variables({'PEX_HTTP_RETRIES': retry_count})
assert RequestsContext(verify=False, env=env)._max_retries == int(retry_count)
def timeout_side_effect(timeout_error=None, num_timeouts=1):
timeout_error = timeout_error or requests.packages.urllib3.exceptions.ConnectTimeoutError
url = 'http://pypi.python.org/foo.tar.gz'
  num_requests = [0]  # mutable cell so the nested timeout() can update the count (no 'nonlocal' in Python 2)
def timeout(*args, **kwargs):
if num_requests[0] < num_timeouts:
num_requests[0] += 1
raise timeout_error(None, url, 'Time Out')
else:
return MockHttpLibResponse(BLOB)
return url, timeout
@pytest.mark.skipif(NO_REQUESTS)
def test_requests_context_retries_connect_timeout():
with mock.patch.object(
requests.packages.urllib3.connectionpool.HTTPConnectionPool,
'_make_request') as mock_make_request:
url, mock_make_request.side_effect = timeout_side_effect()
context = RequestsContext(verify=False)
data = context.read(Link.wrap(url))
assert data == BLOB
@pytest.mark.skipif(NO_REQUESTS)
def test_requests_context_retries_connect_timeout_retries_exhausted():
with mock.patch.object(
requests.packages.urllib3.connectionpool.HTTPConnectionPool,
'_make_request') as mock_make_request:
url, mock_make_request.side_effect = timeout_side_effect(num_timeouts=3)
env = Variables(environ={'PEX_HTTP_RETRIES': '2'})
context = RequestsContext(verify=False, env=env)
with pytest.raises(Context.Error):
context.read(Link.wrap(url))
@pytest.mark.skipif(NO_REQUESTS)
def test_requests_context_retries_read_timeout():
with mock.patch.object(
requests.packages.urllib3.connectionpool.HTTPConnectionPool,
'_make_request') as mock_make_request:
url, mock_make_request.side_effect = timeout_side_effect(
timeout_error=requests.packages.urllib3.exceptions.ReadTimeoutError)
context = RequestsContext(verify=False)
data = context.read(Link.wrap(url))
assert data == BLOB
@pytest.mark.skipif(NO_REQUESTS)
def test_requests_context_retries_read_timeout_retries_exhausted():
with mock.patch.object(
requests.packages.urllib3.connectionpool.HTTPConnectionPool,
'_make_request') as mock_make_request:
url, mock_make_request.side_effect = timeout_side_effect(
timeout_error=requests.packages.urllib3.exceptions.ReadTimeoutError,
num_timeouts=3)
env = Variables(environ={'PEX_HTTP_RETRIES': '2'})
context = RequestsContext(verify=False, env=env)
with pytest.raises(Context.Error):
context.read(Link.wrap(url))
def test_urllib_context_utf8_encoding():
BYTES = b'this is a decoded utf8 string'
with named_temporary_file() as tf:
tf.write(BYTES)
tf.flush()
local_link = Link.wrap(tf.name)
# Trick UrllibContext into thinking this is a remote link
class MockUrllibContext(UrllibContext):
def open(self, link):
return super(MockUrllibContext, self).open(local_link)
context = MockUrllibContext()
assert context.content(Link.wrap('http://www.google.com')) == BYTES.decode(
UrllibContext.DEFAULT_ENCODING)
| {
"content_hash": "b8ad154dc7a1f0a6c1ff382438acc336",
"timestamp": "",
"source": "github",
"line_count": 237,
"max_line_length": 91,
"avg_line_length": 27.354430379746834,
"alnum_prop": 0.7083140521363567,
"repo_name": "sixninetynine/pex",
"id": "aa16ee9dc88c10d450a15223664205c5e01e6e41",
"size": "6615",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/test_http.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "381142"
},
{
"name": "Shell",
"bytes": "311"
}
],
"symlink_target": ""
} |
from cereal import car
from opendbc.can.parser import CANParser
from selfdrive.car.ford.values import DBC
from selfdrive.config import Conversions as CV
from selfdrive.car.interfaces import RadarInterfaceBase
RADAR_MSGS = list(range(0x500, 0x540))
def _create_radar_can_parser(car_fingerprint):
msg_n = len(RADAR_MSGS)
signals = list(zip(['X_Rel'] * msg_n + ['Angle'] * msg_n + ['V_Rel'] * msg_n,
RADAR_MSGS * 3,
[0] * msg_n + [0] * msg_n + [0] * msg_n))
checks = list(zip(RADAR_MSGS, [20]*msg_n))
return CANParser(DBC[car_fingerprint]['radar'], signals, checks, 1)
class RadarInterface(RadarInterfaceBase):
def __init__(self, CP):
super().__init__(CP)
self.validCnt = {key: 0 for key in RADAR_MSGS}
self.track_id = 0
self.rcp = _create_radar_can_parser(CP.carFingerprint)
self.trigger_msg = 0x53f
self.updated_messages = set()
def update(self, can_strings):
vls = self.rcp.update_strings(can_strings)
self.updated_messages.update(vls)
if self.trigger_msg not in self.updated_messages:
return None
ret = car.RadarData.new_message()
errors = []
if not self.rcp.can_valid:
errors.append("canError")
ret.errors = errors
for ii in sorted(self.updated_messages):
cpt = self.rcp.vl[ii]
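      # NOTE: both conditions below test the same thing, so a valid X_Rel
      # resets the counter and then bumps it to 1, while an invalid reading
      # decays it toward 0.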
if cpt['X_Rel'] > 0.00001:
self.validCnt[ii] = 0 # reset counter
if cpt['X_Rel'] > 0.00001:
self.validCnt[ii] += 1
else:
self.validCnt[ii] = max(self.validCnt[ii] - 1, 0)
#print ii, self.validCnt[ii], cpt['VALID'], cpt['X_Rel'], cpt['Angle']
# radar point only valid if there have been enough valid measurements
if self.validCnt[ii] > 0:
if ii not in self.pts:
self.pts[ii] = car.RadarData.RadarPoint.new_message()
self.pts[ii].trackId = self.track_id
self.track_id += 1
self.pts[ii].dRel = cpt['X_Rel'] # from front of car
self.pts[ii].yRel = cpt['X_Rel'] * cpt['Angle'] * CV.DEG_TO_RAD # in car frame's y axis, left is positive
self.pts[ii].vRel = cpt['V_Rel']
self.pts[ii].aRel = float('nan')
self.pts[ii].yvRel = float('nan')
self.pts[ii].measured = True
else:
if ii in self.pts:
del self.pts[ii]
ret.points = list(self.pts.values())
self.updated_messages.clear()
return ret
| {
"content_hash": "0837bb1c059f3ce931b8049b4533c0fb",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 114,
"avg_line_length": 33.61971830985915,
"alnum_prop": 0.6099706744868035,
"repo_name": "vntarasov/openpilot",
"id": "20a435b0821dffa9917b62cdb7330fd13bca3177",
"size": "2410",
"binary": false,
"copies": "1",
"ref": "refs/heads/eon-0710",
"path": "selfdrive/car/ford/radar_interface.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "44441"
},
{
"name": "C",
"bytes": "7910872"
},
{
"name": "C++",
"bytes": "2526252"
},
{
"name": "CMake",
"bytes": "7454"
},
{
"name": "Cap'n Proto",
"bytes": "55667"
},
{
"name": "Dockerfile",
"bytes": "3141"
},
{
"name": "Groovy",
"bytes": "2620"
},
{
"name": "Makefile",
"bytes": "49988"
},
{
"name": "NSIS",
"bytes": "7977"
},
{
"name": "Objective-C",
"bytes": "72319"
},
{
"name": "Python",
"bytes": "1188467"
},
{
"name": "Shell",
"bytes": "13772"
}
],
"symlink_target": ""
} |
from django.db import models
from django import forms
from django.forms.formsets import formset_factory
from django.test import TestCase
from ..templatetags import admin2_tags
from ..views import IndexView
class TagsTestsModel(models.Model):
field1 = models.CharField(max_length=23)
field2 = models.CharField('second field', max_length=42)
def was_published_recently(self):
return True
was_published_recently.boolean = True
was_published_recently.short_description = 'Published recently?'
class Meta:
verbose_name = "Tags Test Model"
verbose_name_plural = "Tags Test Models"
class TagsTestForm(forms.Form):
visible_1 = forms.CharField()
visible_2 = forms.CharField()
invisible_1 = forms.HiddenInput()
TagsTestFormSet = formset_factory(TagsTestForm)
class TagsTests(TestCase):
def setUp(self):
self.instance = TagsTestsModel()
def test_admin2_urlname(self):
self.assertEquals(
"admin2:None_None_index",
admin2_tags.admin2_urlname(IndexView, "index")
)
def test_model_verbose_name_as_model_class(self):
self.assertEquals(
TagsTestsModel._meta.verbose_name,
admin2_tags.model_verbose_name(TagsTestsModel)
)
def test_model_verbose_name_as_model_instance(self):
self.assertEquals(
self.instance._meta.verbose_name,
admin2_tags.model_verbose_name(self.instance)
)
def test_model_verbose_name_plural_as_model_class(self):
self.assertEquals(
TagsTestsModel._meta.verbose_name_plural,
admin2_tags.model_verbose_name_plural(TagsTestsModel)
)
def test_model_verbose_name_plural_as_model_instance(self):
self.assertEquals(
self.instance._meta.verbose_name_plural,
admin2_tags.model_verbose_name_plural(self.instance)
)
def test_model_field_verbose_name_autogenerated(self):
self.assertEquals(
'field1',
admin2_tags.model_attr_verbose_name(self.instance, 'field1')
)
def test_model_field_verbose_name_overridden(self):
self.assertEquals(
'second field',
admin2_tags.model_attr_verbose_name(self.instance, 'field2')
)
def test_model_method_verbose_name(self):
self.assertEquals(
'Published recently?',
admin2_tags.model_attr_verbose_name(self.instance, 'was_published_recently')
)
def test_formset_visible_fieldlist(self):
formset = TagsTestFormSet()
self.assertEquals(
admin2_tags.formset_visible_fieldlist(formset),
[u'Visible 1', u'Visible 2']
)
def test_verbose_name_for(self):
app_verbose_names = {
u'app_one_label': 'App One Verbose Name',
}
self.assertEquals(
"App One Verbose Name",
admin2_tags.verbose_name_for(app_verbose_names, 'app_one_label')
)
| {
"content_hash": "42972df09bd7cfc82c5fde670dfb7fb3",
"timestamp": "",
"source": "github",
"line_count": 101,
"max_line_length": 88,
"avg_line_length": 29.861386138613863,
"alnum_prop": 0.6389257294429708,
"repo_name": "yarbelk/django-admin2",
"id": "0d9bf5b76b959216428f7d890e964d7c3ada3725",
"size": "3016",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "djadmin2/tests/test_admin2tags.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "34904"
},
{
"name": "Python",
"bytes": "230914"
}
],
"symlink_target": ""
} |
from django.db import models
class Event(models.Model):
start = models.ForeignKey('TimeRepresentation')
class TimeRepresentation(models.Model):
hora = models.TimeField(null=True)
__test__ = {'API_TESTS': """
>>> from datetime import time
>>> t = TimeRepresentation.objects.create(hora=time(0, 0))
>>> ev = Event.objects.create(start=t)
# If we access without select_related, it works fine
>>> evs1 = Event.objects.all()
>>> evs1[0].start.hora
datetime.time(0, 0)
# If we access with select_related, it works too
>>> evs2 = Event.objects.all().select_related('start')
>>> evs2[0].start.hora
datetime.time(0, 0)
"""}
| {
"content_hash": "8fec1cd077140ed43812cd91947d1216",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 58,
"avg_line_length": 25.2,
"alnum_prop": 0.6936507936507936,
"repo_name": "schmidsi/django-pyodbc",
"id": "fcc91c46df5d4facc510e5e880705cf481303947",
"size": "631",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "tests/issue049/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "220330"
}
],
"symlink_target": ""
} |
from django.db import models
class Category(models.Model):
name = models.CharField(blank=False, max_length=255)
description = models.TextField(blank=True, default='')
class Meta:
db_table = 'category'
def __str__(self):
return self.name
| {
"content_hash": "bd3bad5a7a49974aab9388de738ca7c3",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 58,
"avg_line_length": 23.083333333333332,
"alnum_prop": 0.6462093862815884,
"repo_name": "vuonghv/brs",
"id": "369efc3a0200bfb8b7e5c00be1e7979a3b226956",
"size": "277",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "apps/categories/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1062528"
},
{
"name": "HTML",
"bytes": "1877883"
},
{
"name": "JavaScript",
"bytes": "3157021"
},
{
"name": "PHP",
"bytes": "1684"
},
{
"name": "Python",
"bytes": "84367"
}
],
"symlink_target": ""
} |
from __future__ import print_function, division
from glob import glob
import os
import json
import numpy as np
import scipy
from sklearn.preprocessing import OneHotEncoder
from sklearn.metrics import confusion_matrix
#np.set_printoptions(precision=4, linewidth=100)
from matplotlib import pyplot as plt
import utils; reload(utils)
from utils import plots, get_batches, plot_confusion_matrix, get_data
from numpy.random import random, permutation
from scipy import misc, ndimage
from scipy.ndimage.interpolation import zoom
import keras
from keras import backend as K
from keras.utils.data_utils import get_file
from keras.models import Sequential
from keras.layers import Input
from keras.layers.core import Flatten, Dense, Dropout, Lambda
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.optimizers import SGD, RMSprop
from keras.preprocessing import image
from vgg16 import Vgg16
import bcolz
from utils import *
path="data/dogscats/"
#path="data/dogscats/sample/"
model_path=path+"models/"
if not os.path.exists(model_path): os.mkdir(model_path)
batch_size=64
def get_vgg():
vgg=Vgg16()
model=vgg.model
return model
def get_train_valid_batches():
valid_batches=get_batches(path+"valid",shuffle=False, batch_size=batch_size)
train_batches=get_batches(path+"train", shuffle=False, batch_size=batch_size)
return train_batches, valid_batches
def get_train_valid_data():
train_data=get_data(path+"train")
valid_data=get_data(path+"valid")
return train_data, valid_data
def save_array(fname, array): c=bcolz.carray(array, rootdir=fname, mode='w'); c.flush()
def load_array(fname): return bcolz.open(fname)[:]
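# one-hot encode integer class labels: (N,) -> (N, num_classes)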
def onehot(x): return np.array(OneHotEncoder().fit_transform(x.reshape(-1,1)).todense())
def get_data_labels():
train_batches, valid_batches=get_train_valid_batches()
valid_classes=valid_batches.classes
train_classes=train_batches.classes
valid_labels=onehot(valid_classes)
train_labels=onehot(train_classes)
train_data, valid_data=get_train_valid_data()
return (train_batches, valid_batches,
train_data, valid_data,
train_classes,valid_classes,
train_labels, valid_labels)
def get_labels():
ans=get_data_labels()
return ans[-2],ans[-1]
def plots_idx(idx, titles=None):
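    # NOTE: relies on a module-level `filenames` list (e.g. valid_batches.filenames)
    # being assigned before this helper is called.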
plots([image.load_img(path+"valid/"+filenames[i]) for i in idx], titles=titles)
def gen_vgg_features(model, cache=False):
if not cache:
train_features=model.predict(train_data, batch_size=batch_size)
valid_features=model.predict(valid_data, batch_size=batch_size)
save_array(model_path+"train_lastlayer_features.bc", train_features)
save_array(model_path+"valid_lastlayer_features.bc", valid_features)
else:
train_features=load_array(model_path+"train_lastlayer_features.bc")
valid_features=load_array(model_path+"valid_lastlayer_features.bc")
return train_features, valid_features
def linear_model(train_features, valid_features, train_labels, valid_labels, epoch=3):
lm=Sequential([ Dense(2, activation="softmax", input_shape=(1000,)) ])
lm.compile(optimizer=RMSprop(lr=0.1), loss="categorical_crossentropy", metrics=["accuracy"])
lm.fit(train_features, train_labels, nb_epoch=epoch, batch_size=batch_size, validation_data=(valid_features, valid_labels))
return lm
def fit_lm():
model=get_vgg()
train_features, valid_features=gen_vgg_features(model, cache=False)
train_labels,valid_labels=get_labels()
lm=linear_model(train_features, valid_features, train_labels,valid_labels, epoch=3)
return lm
def test_model(model, valid_data, valid_labels, size=10, batch_size=4):
choices=np.random.choice(range(len(valid_data)), size)
data=valid_data[choices]
    class_probs=model.predict(data, batch_size=batch_size)
    pred_class=np.argmax(class_probs, axis=1)
    return class_probs, pred_class
def viz_confusion(model, valid_features, valid_labels, valid_classes, valid_batches):
n_view=4
preds=model.predict_classes(valid_features, batch_size=batch_size)
probs=model.predict_proba(valid_features, batch_size)[:,0]
#case 1
correct=np.where(preds==valid_labels[:,1])[0]
idx=permutation(correct)[:n_view]
plots_idx(idx, probs[idx])
#case 2
incorrect=np.where(preds!=valid_labels[:,1])[0]
idx=permutation(incorrect)[:n_view]
plots_idx(idx, probs[idx])
#case 3
true_positive_cats=np.where((preds==0) &(preds==valid_labels[:,1]))[0]
most_correct_cats=np.argsort(probs[true_positive_cats])[::-1][:n_view]
plots_idx(true_positive_cats[most_correct_cats], probs[true_positive_cats][most_correct_cats])
#case 4
true_positive_dogs=np.where((preds==1) &(preds==valid_labels[:,1]))[0]
most_correct_dogs=np.argsort(probs[true_positive_dogs])[:n_view]
plots_idx(true_positive_dogs[most_correct_dogs], 1-probs[true_positive_dogs][most_correct_dogs])
#case 5
wrong_cats=np.where((preds==0) & (preds!=valid_labels[:,1]))[0]
most_incorrect_cats=np.argsort(probs[wrong_cats])[::-1][:n_view]
plots_idx(wrong_cats[most_incorrect_cats], probs[wrong_cats][most_incorrect_cats])
#case 6
wrong_dogs=np.where((preds==1) &(preds!=valid_labels[:,1]))[0]
most_incorrect_dogs=np.argsort(probs[wrong_dogs])[:n_view]
plots_idx(wrong_dogs[most_incorrect_dogs], probs[wrong_dogs[most_incorrect_dogs]])
#case 7
most_uncertain=np.argsort(np.abs(probs-0.5))
plots_idx(most_uncertain[:n_view], probs[most_uncertain][:n_view])
#confusion matrix
cm=confusion_matrix(valid_classes, preds)
plot_confusion_matrix(cm, valid_batches.class_indices)
def finetune(model, train_data, train_labels, valid_data, valid_labels, cache=False):
#vgg model
model.pop()
for layer in model.layers:
layer.trainable=False
model.add(Dense(2, activation="softmax"))
opt=RMSprop(lr=0.1)
model.compile(optimizer=opt, loss="categorical_crossentropy", metrics=["accuracy"])
if cache:
model.load_weights(model_path+"finetune1.h5")
return model
else:
gen=image.ImageDataGenerator()
train_batches=gen.flow(train_data, train_labels, batch_size=batch_size, shuffle=True)
valid_batches=gen.flow(valid_data, valid_labels, batch_size=batch_size, shuffle=True)
def fit_model(model, train_batches, valid_batches, nb_epoch=1):
model.fit_generator(train_batches, samples_per_epoch=train_batches.N, nb_epoch=nb_epoch,
validation_data=valid_batches, nb_val_samples=valid_batches.N)
fit_model(model, train_batches, valid_batches, nb_epoch=2)
model.save_weights(model_path+"finetune1.h5")
return model
def main():
#Train liner model
lm=fit_lm()
#Fine tune vgg model
(train_batches, valid_batches,
train_data, valid_data,
train_classes,valid_classes,
train_labels, valid_labels)=get_data_labels()
    vgg_finetuned=finetune(get_vgg(), train_data, train_labels, valid_data, valid_labels)
vgg_finetuned.evaluate(valid_data, valid_labels)
if __name__=="__main__":
return """This code used VGG16 convolutional features, with FC layers
to achieve accuracy of 97.5 on validation data"""
| {
"content_hash": "9d11a71368640a3d3d34b136d798a79f",
"timestamp": "",
"source": "github",
"line_count": 182,
"max_line_length": 127,
"avg_line_length": 39.79120879120879,
"alnum_prop": 0.7105771886219276,
"repo_name": "kishorepv/deep-learning-projects",
"id": "1c7ac762e47a7a14b9d9821bb34b3a327d49fa2e",
"size": "7242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "kaggle-dogs-cats.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "35416"
}
],
"symlink_target": ""
} |
from spacy.en import English
from selenium import webdriver
from pyvirtualdisplay import Display
from bs4 import BeautifulSoup
import requests
import re
import numpy as np
class Scraper:
"""
Base class for scraping sites with Selenium and Beautiful Soup
"""
def __init__(self, driver_path):
self.display = Display(visible=0, size=(1920, 1080))
self.display.start()
# Firefox 47+ is incomparible with Selenium 2.53+; use Chrome
self.driver = webdriver.Chrome(driver_path)
self.nlp = English()
def lemmatize(self, texts):
"""
Lemmatizes each word, i.e. lower case and no inflection
"""
lems = lambda x: [w.lemma_ for w in self.nlp(x) if not (w.is_stop or w.is_punct)]
if type(texts) is str:
            text_lemmas = lems(texts)
elif type(texts) is list:
text_lemmas = []
for text in texts:
if type(text) is str:
text_lemmas.append(lems(text))
elif type(text) is list:
text_item_lemmas = []
for text_item in text:
text_item_lemmas.extend(lems(text_item))
text_lemmas.append(text_item_lemmas)
else:
print(type(text), text)
raise TypeError('Lemmatize list items are not strings or lists')
else:
print(type(texts), texts)
raise TypeError('Lemmatize input is not a string or list')
return text_lemmas
def parse_url(self, url, tag, attrs=None, target=None, regex=None):
"""
Retrieves a tag in a url's source, optionally extracting content
"""
try:
response = requests.get(url)
soup = BeautifulSoup(response.content, 'lxml')
parse = soup.find(tag, attrs)
# Optionally extract a target attribute
if target:
parse = parse[target]
# Optionally apply a regex
if regex:
parse = re.findall(regex, str(parse))
except:
parse = None
return parse
def write(self, write_items, write_files):
"""
Writes a string to file or a list of strings separated by newlines
"""
files = []
for f in write_files:
files.append(open(f, 'w'))
for i,item in enumerate(write_items):
if type(item) is str:
files[i].write(item + '\n')
elif type(item) is list:
for row in item:
files[i].write(row + '\n')
else:
print(type(item), item)
raise TypeError('Write input is not a string or list')
for f in files:
f.close()
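# A minimal usage sketch (hypothetical: the chromedriver path, URL and output
# file are illustrative, and network access plus a spaCy model are assumed):
if __name__ == '__main__':
    scraper = Scraper('/usr/local/bin/chromedriver')
    # Fetch the <title> tag of a page via requests/BeautifulSoup
    title = scraper.parse_url('https://example.com', 'title')
    lemmas = scraper.lemmatize(str(title)) if title else []
    scraper.write([' '.join(lemmas)], ['title_lemmas.txt'])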
| {
"content_hash": "8e7b7c17e2b1ae70e9fa12a789e21836",
"timestamp": "",
"source": "github",
"line_count": 91,
"max_line_length": 89,
"avg_line_length": 31.197802197802197,
"alnum_prop": 0.5343430785487848,
"repo_name": "GautamShine/scrapers",
"id": "832ecd8bd81e99c27cbed2b616bbcf758b6eb13d",
"size": "2862",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scraper.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "9413"
}
],
"symlink_target": ""
} |
from collections import defaultdict
from axelrod import Player
class Retaliate(Player):
"""
A player starts by cooperating but will retaliate once the opponent
has won more than 10 percent times the number of defections the player has.
"""
classifier = {
'memory_depth': float('inf'), # Long memory
'stochastic': False,
'inspects_source': False,
'manipulates_source': False,
'manipulates_state': False
}
def __init__(self, retaliation_threshold=0.1):
"""
Uses the basic init from the Player class, but also set the name to
include the retaliation setting.
"""
Player.__init__(self)
self.retaliation_threshold = retaliation_threshold
self.name = (
'Retaliate (' +
str(self.retaliation_threshold) + ')')
self.play_counts = defaultdict(int)
self.init_args = (retaliation_threshold,)
def strategy(self, opponent):
"""
If the opponent has played D to my C more often than x% of the time
that I've done the same to him, play D. Otherwise, play C.
"""
if len(self.history):
last_round = (self.history[-1], opponent.history[-1])
self.play_counts[last_round] += 1
CD_count = self.play_counts[('C', 'D')]
DC_count = self.play_counts[('D', 'C')]
if CD_count > DC_count * self.retaliation_threshold:
return 'D'
return 'C'
def reset(self):
Player.reset(self)
self.play_counts = defaultdict(int)
class Retaliate2(Retaliate):
"""
Retaliate player with a threshold of 8 percent.
"""
def __init__(self, retaliation_threshold=0.08):
        super(Retaliate2, self).__init__(
retaliation_threshold=retaliation_threshold)
class Retaliate3(Retaliate):
"""
Retaliate player with a threshold of 5 percent.
"""
def __init__(self, retaliation_threshold=0.05):
        super(Retaliate3, self).__init__(
retaliation_threshold=retaliation_threshold)
class LimitedRetaliate(Player):
"""
A player that co-operates unless the opponent defects and wins.
It will then retaliate by defecting. It stops when either, it has beaten
the opponent 10 times more often that it has lost or it reaches the
retaliation limit (20 defections).
"""
classifier = {
'memory_depth': float('inf'), # Long memory
'stochastic': False,
'inspects_source': False,
'manipulates_source': False,
'manipulates_state': False
}
    def __init__(self, retaliation_threshold=0.1, retaliation_limit=20):
"""
Uses the basic init from the Player class, but also set the name to
include the retaliation setting.
"""
Player.__init__(self)
self.retaliating = False
self.retaliation_count = 0
self.retaliation_threshold = retaliation_threshold
self.retaliation_limit = retaliation_limit
self.play_counts = defaultdict(int)
self.init_args = (retaliation_threshold, retaliation_limit)
self.name = (
'Limited Retaliate (' +
str(self.retaliation_threshold) +
'/' + str(self.retaliation_limit) + ')')
def strategy(self, opponent):
"""
If the opponent has played D to my C more often than x% of the time
that I've done the same to him, retaliate by playing D but stop doing
so once I've hit the retaliation limit.
"""
if len(self.history):
last_round = (self.history[-1], opponent.history[-1])
self.play_counts[last_round] += 1
CD_count = self.play_counts[('C', 'D')]
DC_count = self.play_counts[('D', 'C')]
if CD_count > DC_count * self.retaliation_threshold:
self.retaliating = True
else:
self.retaliating = False
self.retaliation_count = 0
#history = list(zip(self.history, opponent.history))
#if history.count(('C', 'D')) > (
#history.count(('D', 'C')) * self.retaliation_threshold):
#self.retaliating = True
#else:
#self.retaliating = False
#self.retaliation_count = 0
if self.retaliating:
if self.retaliation_count < self.retaliation_limit:
self.retaliation_count += 1
return 'D'
else:
self.retaliation_count = 0
self.retaliating = False
return 'C'
def reset(self):
Player.reset(self)
self.play_counts = defaultdict(int)
self.retaliating = False
self.retaliation_count = 0
class LimitedRetaliate2(LimitedRetaliate):
"""
LimitedRetaliate player with a threshold of 8 percent and a
retaliation limit of 15.
"""
def __init__(self, retaliation_threshold=0.08, retaliation_limit=15):
        super(LimitedRetaliate2, self).__init__(
retaliation_threshold=retaliation_threshold,
retaliation_limit=retaliation_limit)
class LimitedRetaliate3(LimitedRetaliate):
"""
LimitedRetaliate player with a threshold of 5 percent and a
retaliation limit of 20.
"""
def __init__(self, retaliation_threshold=0.05, retaliation_limit=20):
        super(LimitedRetaliate3, self).__init__(
retaliation_threshold=retaliation_threshold,
retaliation_limit=retaliation_limit)
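# Standalone sanity check of the threshold arithmetic used by strategy()
# above (illustrative; it does not touch the tournament machinery):
if __name__ == '__main__':
    threshold = 0.1
    for cd_count, dc_count in [(1, 10), (2, 10)]:
        action = 'D' if cd_count > dc_count * threshold else 'C'
        # With ten ('D', 'C') wins the player tolerates exactly one
        # ('C', 'D') loss; the second one triggers retaliation.
        print(cd_count, dc_count, action)  # (1, 10) -> 'C', (2, 10) -> 'D'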
| {
"content_hash": "6bf8fe3c6cfcbb1989604edb7a5a49d1",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 79,
"avg_line_length": 32.45882352941177,
"alnum_prop": 0.5962305183037332,
"repo_name": "risicle/Axelrod",
"id": "1efbaa2a28fd0953205948f2c833651e4b4aaa57",
"size": "5518",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "axelrod/strategies/retaliate.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "308196"
},
{
"name": "Shell",
"bytes": "593"
}
],
"symlink_target": ""
} |
"""hw module tests."""
import mox
import stubout
from google.apputils import app
from google.apputils import basetest
from simian.mac.common import hw
class HwModuleTest(mox.MoxTestBase):
def setUp(self):
mox.MoxTestBase.setUp(self)
self.stubs = stubout.StubOutForTesting()
def tearDown(self):
self.mox.UnsetStubs()
self.stubs.UnsetAll()
class SystemProfileTest(mox.MoxTestBase):
def setUp(self):
mox.MoxTestBase.setUp(self)
self.stubs = stubout.StubOutForTesting()
self.sp = hw.SystemProfile()
def tearDown(self):
self.mox.UnsetStubs()
self.stubs.UnsetAll()
def testInit(self):
"""Test __init__()."""
self.assertEqual(self.sp._profile, {})
self.assertEqual(self.sp._include_only, None)
temp_sp = hw.SystemProfile(include_only='foo')
self.assertEqual(temp_sp._include_only, 'foo')
def testGetSystemProfilerOutput(self):
"""Test _GetSystemProfilerOutput()."""
stdout = 'out'
stderr = ''
self.mox.StubOutWithMock(hw.subprocess, 'Popen', True)
mock_sp = self.mox.CreateMockAnything()
hw.subprocess.Popen(
['/usr/sbin/system_profiler', '-XML'],
stdout = hw.subprocess.PIPE,
stderr = hw.subprocess.PIPE).AndReturn(mock_sp)
mock_sp.communicate().AndReturn((stdout, stderr))
mock_sp.wait().AndReturn(0)
hw.subprocess.Popen(
['/usr/sbin/system_profiler', '-XML', 'SPNetworkDataType'],
stdout = hw.subprocess.PIPE,
stderr = hw.subprocess.PIPE).AndReturn(mock_sp)
mock_sp.communicate().AndReturn((stdout, stderr))
mock_sp.wait().AndReturn(0)
self.mox.ReplayAll()
self.assertEqual(stdout, self.sp._GetSystemProfilerOutput())
self.sp._include_only = ['network', 'unknown thing']
self.assertEqual(stdout, self.sp._GetSystemProfilerOutput())
self.mox.VerifyAll()
def testGetSystemProfile(self):
"""Test _GetSystemProfile()."""
sp_xml = 'foo'
mock_plist = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(self.sp, '_GetSystemProfilerOutput')
self.mox.StubOutWithMock(hw.plist, 'ApplePlist', True)
self.sp._GetSystemProfilerOutput().AndReturn(sp_xml)
hw.plist.ApplePlist(sp_xml).AndReturn(mock_plist)
mock_plist.Parse().AndReturn(None)
mock_plist.GetContents().AndReturn('contents')
self.mox.ReplayAll()
self.sp._GetSystemProfile()
self.assertEqual(self.sp._system_profile_xml, sp_xml)
self.assertEqual(self.sp._system_profile, 'contents')
self.mox.VerifyAll()
def testGetSystemProfilePlistParseError(self):
"""Test _GetSystemProfile() with plist.Error raised when calling Parse()."""
sp_xml = 'foo'
mock_plist = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(self.sp, '_GetSystemProfilerOutput')
self.mox.StubOutWithMock(hw.plist, 'ApplePlist', True)
self.sp._GetSystemProfilerOutput().AndReturn(sp_xml)
hw.plist.ApplePlist(sp_xml).AndReturn(mock_plist)
mock_plist.Parse().AndRaise(hw.plist.Error)
self.mox.ReplayAll()
self.assertRaises(hw.SystemProfilerError, self.sp._GetSystemProfile)
self.mox.VerifyAll()
def testFindAll(self):
"""Test _FindAll()."""
funcs = (
'_GetSystemProfile',
'_FindHDDSerial',
'_FindMachineModel',
'_FindSerialNumber',
'_FindPlatformUuid',
'_FindMacAddresses',
'_FindBatteryInfo',
'_FindUSBDevices')
for func_name in funcs:
self.mox.StubOutWithMock(self.sp, func_name)
getattr(self.sp, func_name)().AndReturn(None)
self.mox.ReplayAll()
self.sp._FindAll()
self.mox.VerifyAll()
def testFindHddSerialWithNoNvme(self):
self.mox.StubOutWithMock(self.sp, '_GetSystemProfilerOutput')
self.sp._GetSystemProfilerOutput().AndReturn('''
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<array>
<dict>
<key>_SPCommandLineArguments</key>
<array>
<string>/usr/sbin/system_profiler</string>
<string>-nospawn</string>
<string>-xml</string>
<string>SPNVMeDataType</string>
<string>-detailLevel</string>
<string>full</string>
</array>
<key>_SPResponseTime</key>
<real>0.2080950140953064</real>
<key>_dataType</key>
<string>SPNVMeDataType</string>
</dict>
</array>
</plist>
'''.strip())
self.mox.ReplayAll()
self.sp._FindAll()
self.mox.VerifyAll()
def testGetProfile(self):
"""Test GetProfile()."""
self.sp._profile = {}
self.mox.StubOutWithMock(self.sp, '_FindAll')
self.sp._FindAll().AndReturn(None)
self.mox.ReplayAll()
self.assertEqual({}, self.sp.GetProfile())
self.mox.VerifyAll()
def testGetProfileWhenReady(self):
"""Test GetProfile()."""
self.sp._profile = 'foo'
self.mox.ReplayAll()
self.assertEqual('foo', self.sp.GetProfile())
self.mox.VerifyAll()
def testFindBatteryInfoWithMissingSerial(self):
"""Test _FindBatteryInfo() with a missing serial number."""
# sppower_battery_model_info dict lacking sppower_battery_serial_number
spd = [{
'_dataType': 'SPPowerDataType',
'_items': [{'fookey': 'foovalue', 'sppower_battery_model_info': {}}],
}]
self.sp._system_profile = spd
self.sp._FindBatteryInfo()
self.assertEqual('unknown', self.sp._profile['battery_serial_number'])
def main(unused_argv):
basetest.main()
if __name__ == '__main__':
app.run()
| {
"content_hash": "62c86006430db9a6319437a2db2480b8",
"timestamp": "",
"source": "github",
"line_count": 184,
"max_line_length": 102,
"avg_line_length": 29.847826086956523,
"alnum_prop": 0.6671522214129643,
"repo_name": "alexandregz/simian",
"id": "cfef31db880db5288c64cd9603a594deadaceeee",
"size": "6117",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/tests/simian/mac/common/hw_test.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "37847"
},
{
"name": "HTML",
"bytes": "89696"
},
{
"name": "JavaScript",
"bytes": "28084"
},
{
"name": "Makefile",
"bytes": "8128"
},
{
"name": "Python",
"bytes": "1431095"
},
{
"name": "Shell",
"bytes": "19945"
}
],
"symlink_target": ""
} |
"""
Set of classes and functions to manipulate
ABINIT '.in' input files
ABINIT '.files' files
ABINIT '_OUT.nc' output files
"""
from . import task
from .abifiles import AbiFiles
from .input import AbinitInput, xyz2input
from .abinit import AbinitJob
from .output import AbinitOutput
from .run import AbinitRun
from .utils import psp_name
from .parser import parser
from .multibinit import Multibinit
# __all__ = filter(lambda s: not s.startswith('_'), dir())
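# Illustrative import sketch; the constructor arguments are assumptions about
# this API, not verified behaviour:
#
#   from pychemia.code.abinit import AbinitInput, AbinitOutput
#   inp = AbinitInput('abinit.in')        # hypothetical: parse a '.in' file
#   out = AbinitOutput('abinit_OUT.nc')   # hypothetical: read an output file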
| {
"content_hash": "eaacc5c93c3d4dcdf8e5d269b55d0837",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 58,
"avg_line_length": 23.15,
"alnum_prop": 0.7537796976241901,
"repo_name": "MaterialsDiscovery/PyChemia",
"id": "4b145ccaee7e6561b7a439fa697338ce728728d0",
"size": "463",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pychemia/code/abinit/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1390398"
},
{
"name": "Shell",
"bytes": "325"
}
],
"symlink_target": ""
} |
"""empty message
Revision ID: 32b1afcf724d
Revises: 01bc7843d696
Create Date: 2016-12-30 14:01:24.416473
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '32b1afcf724d'
down_revision = '01bc7843d696'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
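    # Pattern for the read_only columns below: add the column as nullable,
    # backfill the existing rows, then tighten the column to NOT NULL; this
    # avoids the migration failing on tables that already contain data.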
op.add_column('check', sa.Column('parent_check_id', sa.Integer(), nullable=True))
op.add_column('check', sa.Column('read_only', sa.Boolean()))
op.execute('update "check" set read_only = false')
op.alter_column('check', 'read_only', nullable=False)
op.add_column('data_source', sa.Column('parent_data_source_id', sa.Integer(), nullable=True))
op.add_column('data_source', sa.Column('read_only', sa.Boolean()))
op.execute('update data_source set read_only = false')
op.alter_column('data_source', 'read_only', nullable=False)
op.add_column('job_template', sa.Column('parent_job_template_id', sa.Integer(), nullable=True))
op.add_column('job_template', sa.Column('read_only', sa.Boolean()))
op.execute('update job_template set read_only = false')
op.alter_column('job_template', 'read_only', nullable=False)
op.add_column('rule', sa.Column('parent_rule_id', sa.Integer(), nullable=True))
op.add_column('rule', sa.Column('read_only', sa.Boolean()))
op.execute('update rule set read_only = false')
op.alter_column('rule', 'read_only', nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('rule', 'read_only')
op.drop_column('rule', 'parent_rule_id')
op.drop_column('job_template', 'read_only')
op.drop_column('job_template', 'parent_job_template_id')
op.drop_column('data_source', 'read_only')
op.drop_column('data_source', 'parent_data_source_id')
op.drop_column('check', 'read_only')
op.drop_column('check', 'parent_check_id')
# ### end Alembic commands ###
| {
"content_hash": "462125e22229d0a974e3fd904816590a",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 99,
"avg_line_length": 40.66,
"alnum_prop": 0.6714215445154943,
"repo_name": "bhgames/data_check",
"id": "caf89831a07fc9a519ccf385d264dd99ce187d2a",
"size": "2033",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "migrations/versions/32b1afcf724d_.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "448"
},
{
"name": "HTML",
"bytes": "1244"
},
{
"name": "JavaScript",
"bytes": "53976"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "119149"
},
{
"name": "Shell",
"bytes": "275"
}
],
"symlink_target": ""
} |
from metakernel import MetaKernel
from ai.backend.client.session import Session
from ai.backend.client.exceptions import BackendAPIError
class BackendKernelBase(MetaKernel):
# ref: https://github.com/ipython/ipykernel/blob/master/ipykernel/kernelbase.py
implementation = 'Backend.AI'
implementation_version = '1.1'
language = 'python'
language_version = '3'
language_info = {
'name': 'Backend.AI (base)',
'mimetype': 'text/x-python3',
'file_extension': '.py',
}
banner = 'Backend.AI Base'
backend_lang = 'python:3.6'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.log.info('Backend.AI kernel starting with client session ID: {0}'.format(self.ident))
self.backend_session = Session()
self.kernel = self.backend_session.Kernel.get_or_create(self.backend_lang, self.ident)
def do_execute_direct(self, code,
silent=False,
store_history=True,
user_expressions=None,
allow_stdin=True):
self._allow_stdin = allow_stdin
mode = 'query'
run_id = None
while True:
try:
result = self.kernel.execute(run_id, code, mode)
run_id = result['runId']
except BackendAPIError as e:
if e.status == 404:
self.Error('[Backend.AI] The kernel is not found '
'(maybe terminated due to idle/exec timeouts).')
self.Error('[Backend.AI] Please restart the kernel to run again.')
else:
self.Error('[Backend.AI] The server returned an error: '
'{0.status} {0.reason} ({0.data[title]})'
.format(e))
return
if not silent:
for item in result['console']:
if item[0] == 'stdout':
self.send_response(self.iopub_socket, 'stream', {
'name': 'stdout',
'text': item[1],
})
elif item[0] == 'stderr':
self.send_response(self.iopub_socket, 'stream', {
'name': 'stderr',
'text': item[1],
})
elif item[0] == 'media':
self.send_response(self.iopub_socket, 'display_data', {
'source': '<user-code>',
'data': {item[1][0]: item[1][1]},
})
elif item[0] == 'html':
self.send_response(self.iopub_socket, 'display_data', {
'source': '<user-code>',
'data': {'text/html': item[1]},
})
if result['status'] == 'finished':
break
elif result['status'] == 'waiting-input':
mode = 'input'
if allow_stdin:
code = self.raw_input('')
else:
code = '(user input not allowed)'
elif result['status'] in ('continued', 'build-finished'):
mode = 'continue'
code = ''
def restart_kernel(self):
pass
def do_shutdown(self, restart):
# Jupyter's restarting first destroys the kernel and then start it over again.
# We cannot use our own restarting mechanism as it produces duplicate kernels.
try:
self.kernel.destroy()
except BackendAPIError as e:
if e.status == 404:
self.log.warning('do_shutdown: missing kernel, ignoring.')
else:
self.log.exception('do_shutdown: API returned an error')
except Exception:
self.log.exception('do_shutdown: API returned an error')
finally:
self.backend_session.close()
return super().do_shutdown(restart)
def get_completions(self, info):
result = self.kernel.complete(info['code'], opts={
'row': info['line_num'],
'col': info['column'],
'line': info['line'],
'post': info['post'],
})
if result is None:
return tuple()
return result.get('completions', tuple())
class BackendPythonKernel(BackendKernelBase):
language = 'python'
language_version = '3'
language_info = {
'name': 'Python 3 on Backend.AI',
'mimetype': 'text/x-python3',
'file_extension': '.py',
'codemirror_mode': 'python',
}
banner = 'Backend (Python 3)'
backend_lang = 'python:3.6'
class BackendPythonTensorFlowKernel(BackendKernelBase):
language = 'python'
language_version = '3'
language_info = {
'name': 'TensorFlow (Python 3, CPU) on Backend.AI',
'mimetype': 'text/x-python3',
'file_extension': '.py',
'codemirror_mode': 'python',
}
banner = 'Backend (TensorFlow with Python 3)'
backend_lang = 'python-tensorflow:1.12'
class BackendPythonTorchKernel(BackendKernelBase):
language = 'python'
language_version = '3'
language_info = {
'name': 'PyTorch (Python 3, CPU) on Backend.AI',
'mimetype': 'text/x-python3',
'file_extension': '.py',
'codemirror_mode': 'python',
}
banner = 'Backend (PyTorch with Python 3)'
backend_lang = 'python-pytorch:1.0'
class BackendPythonTensorFlowGPUKernel(BackendKernelBase):
language = 'python'
language_version = '3'
language_info = {
'name': 'TensorFlow (Python 3, GPU) on Backend.AI',
'mimetype': 'text/x-python3',
'file_extension': '.py',
'codemirror_mode': 'python',
}
banner = 'Backend (GPU-accelerated TensorFlow with Python 3)'
backend_lang = 'python-tensorflow:1.12-gpu'
class BackendPythonTorchGPUKernel(BackendKernelBase):
language = 'python'
language_version = '3'
language_info = {
'name': 'PyTorch (Python 3, GPU) on Backend.AI',
'mimetype': 'text/x-python3',
'file_extension': '.py',
'codemirror_mode': 'python',
}
banner = 'Backend (GPU-accelerated PyTorch with Python 3)'
backend_lang = 'python-pytorch:1.0-gpu'
class BackendJavascriptKernel(BackendKernelBase):
language = 'javascript'
language_version = '8'
language_info = {
        'name': 'Javascript (NodeJS 8) on Backend.AI',
'mimetype': 'text/javascript',
'file_extension': '.js',
'codemirror_mode': 'javascript',
}
    banner = 'Backend (NodeJS 8)'
backend_lang = 'nodejs:8'
class BackendPHPKernel(BackendKernelBase):
language = 'php'
language_version = '7'
language_info = {
'name': 'PHP 7 on Backend.AI',
'mimetype': 'text/x-php',
'file_extension': '.php',
'codemirror_mode': 'php',
}
banner = 'Backend (PHP 7)'
backend_lang = 'php:7'
class BackendJuliaKernel(BackendKernelBase):
language = 'julia'
language_version = '0.6'
language_info = {
'name': 'Julia 0.6 on Backend.AI',
'mimetype': 'text/x-julia',
'file_extension': '.jl',
'codemirror_mode': 'julia',
}
banner = 'Backend (Julia 0.6)'
backend_lang = 'julia:0.6'
class BackendCKernel(BackendKernelBase):
language = 'c'
language_version = '11'
language_info = {
'name': 'C11 on Backend.AI',
'mimetype': 'text/x-csrc',
'file_extension': '.c',
'codemirror_mode': 'clike',
}
banner = 'Backend (C [gnu11])'
backend_lang = 'c:gcc6.3'
class BackendCppKernel(BackendKernelBase):
language = 'cpp'
language_version = '14'
language_info = {
'name': 'C++14 on Backend.AI',
'mimetype': 'text/x-c++src',
'file_extension': '.cc',
'codemirror_mode': 'clike',
}
banner = 'Backend (C++ [gnu++14])'
backend_lang = 'cpp:gcc6.3'
class BackendJavaKernel(BackendKernelBase):
language = 'java'
language_version = '8'
language_info = {
'name': 'Java8 on Backend.AI',
'mimetype': 'text/x-java',
'file_extension': '.java',
'codemirror_mode': 'clike',
}
banner = 'Backend (Java [openjdk8])'
backend_lang = 'java:8'
class BackendRKernel(BackendKernelBase):
language = 'r'
language_version = '3'
language_info = {
'name': 'R 3 on Backend.AI',
'mimetype': 'text/x-rsrc',
'file_extension': '.R',
'codemirror_mode': 'Rscript',
}
banner = 'Backend (R 3)'
backend_lang = 'r:3'
class BackendLuaKernel(BackendKernelBase):
language = 'lua'
language_version = '5.3'
language_info = {
'name': 'Lua 5.3 on Backend.AI',
'mimetype': 'text/x-lua',
'file_extension': '.lua',
'codemirror_mode': 'lua',
}
banner = 'Backend (Lua 5.3)'
backend_lang = 'lua:5.3'
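# Supporting another Backend.AI image is a matter of subclassing
# BackendKernelBase and registering the class in the `kernels` list below.
# A hypothetical sketch (the image tag 'haskell:8' is illustrative and may
# not exist on a given cluster):
class BackendHaskellKernel(BackendKernelBase):
    language = 'haskell'
    language_version = '8'
    language_info = {
        'name': 'Haskell 8 on Backend.AI',
        'mimetype': 'text/x-haskell',
        'file_extension': '.hs',
        'codemirror_mode': 'haskell',
    }
    banner = 'Backend (Haskell 8)'
    backend_lang = 'haskell:8'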
kernels = [
BackendPythonKernel,
BackendPythonTorchKernel,
BackendPythonTorchGPUKernel,
BackendPythonTensorFlowKernel,
BackendPythonTensorFlowGPUKernel,
BackendJavascriptKernel,
BackendPHPKernel,
BackendJuliaKernel,
BackendCKernel,
BackendCppKernel,
BackendJavaKernel,
BackendRKernel,
BackendLuaKernel,
]
| {
"content_hash": "b4ec6bc74915ee88a4dd154e75d9cefd",
"timestamp": "",
"source": "github",
"line_count": 328,
"max_line_length": 98,
"avg_line_length": 29.00609756097561,
"alnum_prop": 0.5389951650199706,
"repo_name": "lablup/sorna-jupyter-kernel",
"id": "374752513777f686fe44dc2e7375a8d35e8305cb",
"size": "9514",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/ai/backend/integration/jupyter/kernel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14204"
}
],
"symlink_target": ""
} |
import datetime
import unittest
from search.ql import Query, Q, GeoQueryArguments
from search.fields import TextField, GeoField, DateField
from search.indexes import DocumentModel
class FakeDocument(DocumentModel):
foo = TextField()
bar = DateField()
class FakeGeoDocument(DocumentModel):
my_loc = GeoField()
class TestKeywordQuery(unittest.TestCase):
def test_basic_keywords(self):
query = Query(FakeDocument)
query.add_keywords("foo bar")
self.assertEqual(
u"foo bar",
unicode(query))
class TestQuery(unittest.TestCase):
def test_basic_keywords(self):
query = Query(FakeDocument)
query.add_q(Q(foo__gt=42))
self.assertEqual(
u"(foo > 42)",
unicode(query))
def test_add_q_or(self):
"""Test that two Q objects can be added to a query without needing to wrap them in
another Q object
"""
query = Query(FakeDocument)
q_1 = Q(foo=42)
q_2 = Q(foo=128)
query.add_q(q_1)
query.add_q(q_2, conn=Q.OR)
self.assertEqual(
u'((foo:"42") OR (foo:"128"))',
unicode(query))
class TestGeoQuery(unittest.TestCase):
def test_geosearch(self):
query = Query(FakeGeoDocument)
query.add_q(Q(my_loc__geo=GeoQueryArguments(3.14, 6.28, 20)))
self.assertEqual(
u"(distance(my_loc, geopoint(3.140000, 6.280000)) < 20)",
unicode(query))
def test_geosearch_lt(self):
query = Query(FakeGeoDocument)
query.add_q(Q(my_loc__geo_lt=GeoQueryArguments(3.14, 6.28, 20)))
self.assertEqual(
u"(distance(my_loc, geopoint(3.140000, 6.280000)) < 20)",
unicode(query))
def test_geosearch_lte(self):
query = Query(FakeGeoDocument)
query.add_q(Q(my_loc__geo_lte=GeoQueryArguments(3.14, 6.28, 20)))
self.assertEqual(
u"(distance(my_loc, geopoint(3.140000, 6.280000)) <= 20)",
unicode(query))
def test_geosearch_gt(self):
query = Query(FakeGeoDocument)
query.add_q(Q(my_loc__geo_gt=GeoQueryArguments(3.14, 6.28, 20)))
self.assertEqual(
u"(distance(my_loc, geopoint(3.140000, 6.280000)) > 20)",
unicode(query))
def test_geosearch_gte(self):
query = Query(FakeGeoDocument)
query.add_q(Q(my_loc__geo_gte=GeoQueryArguments(3.14, 6.28, 20)))
self.assertEqual(
u"(distance(my_loc, geopoint(3.140000, 6.280000)) >= 20)",
unicode(query))
class TestDateQuery(unittest.TestCase):
def test_before(self):
query = Query(FakeDocument)
today = datetime.date.today()
query.add_q(Q(bar__lt=today))
self.assertEqual(
u"(bar < {0})".format(today.isoformat()),
unicode(query))
def test_after(self):
query = Query(FakeDocument)
today = datetime.date.today()
query.add_q(Q(bar__gt=today))
self.assertEqual(
u"(bar > {0} AND NOT bar:{1})".format(today.isoformat(), DateField().none_value()),
unicode(query))
| {
"content_hash": "f6d1b97fa1b151c2ec3c76fae4dedffb",
"timestamp": "",
"source": "github",
"line_count": 110,
"max_line_length": 95,
"avg_line_length": 28.745454545454546,
"alnum_prop": 0.5901328273244781,
"repo_name": "potatolondon/search",
"id": "d7e02d5ebf742a816f23d80e49dc3288f51f2f6b",
"size": "3162",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "search/tests/test_ql.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "150121"
},
{
"name": "Shell",
"bytes": "146"
}
],
"symlink_target": ""
} |
"""Generate a phylo.xml from a MUSCLE MSA.fasta
For usage use 'python phyloxml_from_msa.py -h'
"""
import argparse
##### PARSE ARGUMENTS #####
argparser = argparse.ArgumentParser()
argparser.add_argument("--msa", dest="msa", required=True, metavar="MSA_FILE",
help="path to a MUSCLE_msa.fasta to create phylo from")
argparser.add_argument("--dest", dest="dest", required=True,
help="path to an output phylo.xml")
args = argparser.parse_args()
##### END PARSE ARGUMENTS #####
def phyloxml_from_msa(msa, phyloxml):
from Bio import AlignIO
from Bio.Phylo.TreeConstruction import DistanceCalculator
from Bio.Phylo.TreeConstruction import DistanceTreeConstructor
from Bio import Phylo
ms_alignment = AlignIO.read(msa, "fasta")
    calculator = DistanceCalculator("identity")
dist_matrix = calculator.get_distance(ms_alignment)
constructor = DistanceTreeConstructor()
tree = constructor.upgma(dist_matrix)
Phylo.write(tree, phyloxml, "phyloxml")
if __name__ == "__main__":
msa = args.msa
phyloxml = args.dest
phyloxml_from_msa(msa, phyloxml)
| {
"content_hash": "25e1389826ca331b35929966045e0d52",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 78,
"avg_line_length": 30.72972972972973,
"alnum_prop": 0.6807387862796834,
"repo_name": "daniel0128/receptormarker",
"id": "ba8184b02776013d0005b7c2a815bf778dc56704",
"size": "1137",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "inst/py/phyloxml_from_msa.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "7942318"
},
{
"name": "C++",
"bytes": "9907"
},
{
"name": "CSS",
"bytes": "835"
},
{
"name": "Groff",
"bytes": "287733"
},
{
"name": "JavaScript",
"bytes": "436399"
},
{
"name": "Perl",
"bytes": "946990"
},
{
"name": "PostScript",
"bytes": "5655"
},
{
"name": "Python",
"bytes": "1137"
},
{
"name": "R",
"bytes": "186901"
},
{
"name": "Rebol",
"bytes": "38"
},
{
"name": "Shell",
"bytes": "27336"
},
{
"name": "TeX",
"bytes": "293983"
}
],
"symlink_target": ""
} |
"""
Demo platform for the geo location component.
For more details about this platform, please refer to the documentation
https://home-assistant.io/components/demo/
"""
import logging
import random
from datetime import timedelta
from math import pi, cos, sin, radians
from typing import Optional
from homeassistant.components.geo_location import GeoLocationEvent
from homeassistant.helpers.event import track_time_interval
_LOGGER = logging.getLogger(__name__)
AVG_KM_PER_DEGREE = 111.0
DEFAULT_UNIT_OF_MEASUREMENT = "km"
DEFAULT_UPDATE_INTERVAL = timedelta(minutes=1)
MAX_RADIUS_IN_KM = 50
NUMBER_OF_DEMO_DEVICES = 5
EVENT_NAMES = ["Bushfire", "Hazard Reduction", "Grass Fire", "Burn off",
"Structure Fire", "Fire Alarm", "Thunderstorm", "Tornado",
"Cyclone", "Waterspout", "Dust Storm", "Blizzard", "Ice Storm",
"Earthquake", "Tsunami"]
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Demo geo locations."""
DemoManager(hass, add_entities)
class DemoManager:
"""Device manager for demo geo location events."""
def __init__(self, hass, add_entities):
"""Initialise the demo geo location event manager."""
self._hass = hass
self._add_entities = add_entities
self._managed_devices = []
self._update(count=NUMBER_OF_DEMO_DEVICES)
self._init_regular_updates()
def _generate_random_event(self):
"""Generate a random event in vicinity of this HA instance."""
home_latitude = self._hass.config.latitude
home_longitude = self._hass.config.longitude
# Approx. 111km per degree (north-south).
radius_in_degrees = random.random() * MAX_RADIUS_IN_KM / \
AVG_KM_PER_DEGREE
radius_in_km = radius_in_degrees * AVG_KM_PER_DEGREE
angle = random.random() * 2 * pi
# Compute coordinates based on radius and angle. Adjust longitude value
# based on HA's latitude.
latitude = home_latitude + radius_in_degrees * sin(angle)
longitude = home_longitude + radius_in_degrees * cos(angle) / \
cos(radians(home_latitude))
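        # Dividing by cos(latitude) compensates for meridians converging
        # towards the poles, so the east-west offset spans roughly the same
        # ground distance as the north-south one.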
event_name = random.choice(EVENT_NAMES)
return DemoGeoLocationEvent(event_name, radius_in_km, latitude,
longitude, DEFAULT_UNIT_OF_MEASUREMENT)
def _init_regular_updates(self):
"""Schedule regular updates based on configured time interval."""
track_time_interval(self._hass, lambda now: self._update(),
DEFAULT_UPDATE_INTERVAL)
def _update(self, count=1):
"""Remove events and add new random events."""
# Remove devices.
for _ in range(1, count + 1):
if self._managed_devices:
device = random.choice(self._managed_devices)
if device:
_LOGGER.debug("Removing %s", device)
self._managed_devices.remove(device)
self._hass.add_job(device.async_remove())
# Generate new devices from events.
new_devices = []
for _ in range(1, count + 1):
new_device = self._generate_random_event()
_LOGGER.debug("Adding %s", new_device)
new_devices.append(new_device)
self._managed_devices.append(new_device)
self._add_entities(new_devices)
class DemoGeoLocationEvent(GeoLocationEvent):
"""This represents a demo geo location event."""
def __init__(self, name, distance, latitude, longitude,
unit_of_measurement):
"""Initialize entity with data provided."""
self._name = name
self._distance = distance
self._latitude = latitude
self._longitude = longitude
self._unit_of_measurement = unit_of_measurement
@property
def name(self) -> Optional[str]:
"""Return the name of the event."""
return self._name
@property
def should_poll(self):
"""No polling needed for a demo geo location event."""
return False
@property
def distance(self) -> Optional[float]:
"""Return distance value of this external event."""
return self._distance
@property
def latitude(self) -> Optional[float]:
"""Return latitude value of this external event."""
return self._latitude
@property
def longitude(self) -> Optional[float]:
"""Return longitude value of this external event."""
return self._longitude
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return self._unit_of_measurement
| {
"content_hash": "f1a5de84b3bdba786ed61732f7295aa9",
"timestamp": "",
"source": "github",
"line_count": 131,
"max_line_length": 79,
"avg_line_length": 35.66412213740458,
"alnum_prop": 0.6247859589041096,
"repo_name": "persandstrom/home-assistant",
"id": "ddec369e696aa61b8e3caacc89e36172ff083113",
"size": "4672",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "homeassistant/components/geo_location/demo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1067"
},
{
"name": "Python",
"bytes": "11745210"
},
{
"name": "Ruby",
"bytes": "518"
},
{
"name": "Shell",
"bytes": "16652"
}
],
"symlink_target": ""
} |
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
from subprocess import Popen, PIPE, call
import os
import getpass
import sys
class AMPLauncher(Gtk.Window):
def __init__(self):
#final command variables
self.displayMode = ""
self.presets = []
self.lavdoptsToggle = False
self.lavdoptsThreads = 1
Gtk.Window.__init__(self, title="AMPLauncher")
#main box holding GUI elements
self.mainBox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL,
homogeneous=False,
spacing=6)
self.add(self.mainBox)
#set up all GUI elements
self.set_up_interface()
self.load_presets()
def set_up_interface(self):
#Filepath
self.filePathEntry = Gtk.Entry()
self.chooseFileBtn = Gtk.Button(label="Open File")
self.fileGrid = Gtk.Grid()
self.filePathEntry.set_hexpand(True)
self.fileGrid.attach(self.filePathEntry,
0,
0,
2,
1)
self.fileGrid.attach_next_to(self.chooseFileBtn,
self.filePathEntry,
Gtk.PositionType.RIGHT,
1,
1)
self.mainBox.add(self.fileGrid)
#ao/vo combo boxes
self.voLabel = Gtk.Label("Select video driver:")
self.aoLabel = Gtk.Label("Select audio driver:")
self.voComboBox = Gtk.ComboBoxText()
self.aoComboBox = Gtk.ComboBoxText()
self.set_combo_box(self.voComboBox, self.get_vo_ao("vo"))
self.set_combo_box(self.aoComboBox, self.get_vo_ao("ao"))
self.comboBox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL,
spacing=6)
self.comboBox.add(self.voLabel)
self.comboBox.add(self.voComboBox)
self.comboBox.add(self.aoLabel)
self.comboBox.add(self.aoComboBox)
self.mainBox.add(self.comboBox)
#other options
self.otherLabel = Gtk.Label("Other options:")
##Display mode
self.wndRadioBtn = Gtk.RadioButton.new_from_widget(None)
self.wndRadioBtn.set_label("Windowed")
self.fsRadioBtn = Gtk.RadioButton.new_from_widget(self.wndRadioBtn)
self.fsRadioBtn.set_label("Fullscreen")
self.zmRadioBtn = Gtk.RadioButton.new_from_widget(self.wndRadioBtn)
self.zmRadioBtn.set_label("Zoomed")
##lavdopts
self.useLavdoptsBtn = Gtk.CheckButton("Use lavdopts")
self.threadsLabel = Gtk.Label("Threads(MPEG-1/2 and H.264 only):")
self.threadsSpinBtn = Gtk.SpinButton()
adjustment = Gtk.Adjustment(1, 1, self.get_threads(), 1)
self.threadsSpinBtn.set_adjustment(adjustment)
self.threadsSpinBtn.set_sensitive(False)
#presets
self.presetsLabel = Gtk.Label("Presets:")
self.presetsComboBox = Gtk.ComboBoxText()#text loaded in load_presets()
self.savePresetBtn = Gtk.Button("Save preset")
self.runPresetBtn = Gtk.Button("Run preset")
self.otherBox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL,
spacing=6)
self.radioBox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL,
spacing=6)
self.threadsBox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL,
spacing=6)
self.otherBox.add(self.otherLabel)
self.radioBox.add(self.wndRadioBtn)
self.radioBox.add(self.fsRadioBtn)
self.radioBox.add(self.zmRadioBtn)
self.otherBox.add(self.radioBox)
self.otherBox.add(self.useLavdoptsBtn)
self.threadsBox.add(self.threadsLabel)
self.threadsBox.add(self.threadsSpinBtn)
self.otherBox.add(self.threadsBox)
self.presetsBox = Gtk.Box(orientation=Gtk.Orientation.VERTICAL,
spacing=6)
        self.presetBtnBox = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL,
                                    spacing=6)
self.presetBtnBox.add(self.savePresetBtn)
self.presetBtnBox.add(self.runPresetBtn)
self.presetsBox.add(self.presetsLabel)
self.presetsBox.add(self.presetsComboBox)
self.presetsBox.add(self.presetBtnBox)
self.otherBox.add(self.presetsBox)
self.mainBox.add(self.otherBox)
#Play btn
self.playBtn = Gtk.Button("Play")
self.mainBox.add(self.playBtn)
#Connect all events
self.connect_interface()
def connect_interface(self):
self.chooseFileBtn.connect("clicked",
self.on_open_file_button_clicked)
self.wndRadioBtn.connect("toggled",
self.on_display_mode_toggled,
"")
self.fsRadioBtn.connect("toggled",
self.on_display_mode_toggled,
"-fs")
self.zmRadioBtn.connect("toggled",
self.on_display_mode_toggled,
"-zoom")
self.useLavdoptsBtn.connect("toggled",
self.on_use_lavdopts_toggle)
self.threadsSpinBtn.connect("value-changed",
self.on_spin_button_value_changed)
self.savePresetBtn.connect("clicked",
self.on_save_preset_button_clicked)
self.runPresetBtn.connect("clicked",
self.on_run_preset_button_clicked)
self.playBtn.connect("clicked",
self.on_play_button_clicked)
    def load_presets(self):
        self.path = "/home/" + getpass.getuser() + "/.AMPLauncher"
        # clear previously loaded presets so a reload does not duplicate them
        self.presetsComboBox.remove_all()
        self.presets = []
        if os.path.isdir(self.path):
            for file in os.listdir(self.path):
                self.presetsComboBox.append_text(file)
                self.presets.append(open(self.path + "/" + file).read())
        else:
            os.makedirs(self.path)
def set_combo_box(self, comboBox, entries):
for entry in entries:
comboBox.append_text(entry)
def get_vo_ao(self, opt):
#get available vo/ao drivers from mplayer command
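        # `mplayer -vo help` / `-ao help` print a header line ending in ':'
        # followed by one driver per line, roughly (illustrative output; the
        # exact list varies by build):
        #     Available video output drivers:
        #      xv      X11/Xv shared memory video output
        #      gl      OpenGL
        # The slicing below keeps everything after the ':' and drops the two
        # trailing blank entries produced by the final newlines.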
args = ["mplayer", "-" + opt, "help"]
out, err = Popen(args, stdout=PIPE).communicate()
output = out.decode("utf-8")
output = output.replace("\t", " ")
index = output.find(":")
o = str.split(output[index+2:], '\n')
if opt == "vo":
self.vo = o[:-2]
elif opt == "ao":
self.ao = o[:-2]
return o[:-2]
def get_vo_ao_value(self, opt):
#get vo/ao active combo box value
if opt == "vo":
value = self.vo[self.voComboBox.get_active()]
elif opt == "ao":
value = self.ao[self.aoComboBox.get_active()]
index = value[2:].find(" ")+2
return value[1:index]
def get_threads(self):
#get number of computer's threads using nproc command
args = ["nproc", "--all"]
out, err = Popen(args, stdout=PIPE).communicate()
return int(out.decode("utf-8"))
def on_open_file_button_clicked(self, widget):
dialog = Gtk.FileChooserDialog("Please choose a video file",
self,
Gtk.FileChooserAction.OPEN,
(Gtk.STOCK_CANCEL,
Gtk.ResponseType.CANCEL,
Gtk.STOCK_OPEN,
Gtk.ResponseType.OK))
self.add_filters(dialog)
response = dialog.run()
if response == Gtk.ResponseType.OK:
self.filePathEntry.set_text(dialog.get_filename())
elif response == Gtk.ResponseType.CANCEL:
print("Cancel clicked")
dialog.destroy()
def add_filters(self, dialog):
#Filters for file dialog
filter_all = Gtk.FileFilter()
filter_all.set_name("All files")
filter_all.add_pattern("*")
dialog.add_filter(filter_all)
def on_use_lavdopts_toggle(self, button):
isSensitive = self.threadsSpinBtn.get_property('sensitive')
if isSensitive:
self.threadsSpinBtn.set_sensitive(False)
self.lavdoptsToggle = False
else:
self.threadsSpinBtn.set_sensitive(True)
self.lavdoptsToggle = True
def on_display_mode_toggled(self, button, name):
self.displayMode = name
def on_spin_button_value_changed(self, button):
self.lavdoptsThreads = self.threadsSpinBtn.get_value()
def on_save_preset_button_clicked(self, button):
dialog = Gtk.MessageDialog(self, 0, Gtk.MessageType.QUESTION,
Gtk.ButtonsType.OK, "Enter preset name:")
dialogBox = dialog.get_content_area()
name = "default"
entry = Gtk.Entry()
entry.set_text(name)
entry.show()
dialogBox.add(entry)
response = dialog.run()
if response == Gtk.ResponseType.OK:
name = entry.get_text()
dialog.destroy()
self.save_preset(name)
def on_run_preset_button_clicked(self, button):
cmd = self.presets[self.presetsComboBox.get_active()]
args = str(cmd).split()
print(cmd)
args.insert(1, self.filePathEntry.get_text())
print(args)
call(args)
def save_preset(self, name):
#save preset in form of a command (string)
file = open(self.path + "/" + name, "w")
cmd = ""
for item in self.get_args():
cmd += item + " "
file.write(cmd)
file.close()
self.load_presets()
def get_args(self):
#get an array with all arguments - final command w/o filename
args = ["mplayer",
"-ao", self.get_vo_ao_value("ao"),
"-vo", self.get_vo_ao_value("vo"),
self.displayMode]
if self.lavdoptsToggle:
args.append("-lavdopts")
args.append("threads=" + str(int(self.lavdoptsThreads)))
return args
def on_play_button_clicked(self, button):
#get arguments and add filename, then execute
args = self.get_args()
args.insert(1, self.filePathEntry.get_text())
call(args)
def parse_arguments():
if sys.argv[1] == "-h":
print("Read README on github page")
elif sys.argv[1] == "-v":
print("AMPLauncher v1.0 Copyright 2016 by BlinkBP")
if len(sys.argv) > 1:
parse_arguments()
else:
win = AMPLauncher()
win.connect("delete-event", Gtk.main_quit)
win.show_all()
Gtk.main()
| {
"content_hash": "8b4f3caa22b61e343547d3d8d8971032",
"timestamp": "",
"source": "github",
"line_count": 277,
"max_line_length": 79,
"avg_line_length": 39.761732851985556,
"alnum_prop": 0.5501180315961504,
"repo_name": "BlinkBP/AMPLauncher",
"id": "f16dce05cc1754ba14f52a6b85aeb54e01aa7ab4",
"size": "11038",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "AMPLauncher.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "11038"
}
],
"symlink_target": ""
} |
"""
Panel for showing node properties
"""
import wx
import inspect
from ...nodes import MDFNode
class NodeProperties(wx.Panel):
"""
Panel that shows the node name and a few properties including:
- node type
- module
- filename
- line number
"""
def __init__(self, parent, id=wx.ID_ANY):
wx.Panel.__init__(self, parent, id)
# static text boxes for the node properties
self.title = wx.StaticText(self, label=" " * 50)
self.title.SetFont(wx.Font(12, wx.SWISS, wx.NORMAL, wx.BOLD))
self.nodetype = wx.StaticText(self, label="?")
self.valuetype = wx.StaticText(self, label="?")
self.categories = wx.StaticText(self, label="?")
self.modulename = wx.StaticText(self, label="?")
self.path = wx.StaticText(self, label="?")
self.line = wx.StaticText(self, label="?")
self.sizer = sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(self.title, 0, wx.ALL | wx.EXPAND, border=10)
grid_sizer = wx.GridBagSizer(vgap=3, hgap=10)
grid_sizer.Add(wx.StaticText(self, label="Node Type:"), (0, 0))
grid_sizer.Add(self.nodetype, (0, 1))
grid_sizer.Add(wx.StaticText(self, label="Value Type:"), (1, 0))
grid_sizer.Add(self.valuetype, (1, 1))
grid_sizer.Add(wx.StaticText(self, label="Categories:"), (2, 0))
grid_sizer.Add(self.categories, (2, 1))
grid_sizer.Add(wx.StaticText(self, label="Module:"), (3, 0))
grid_sizer.Add(self.modulename, (3, 1))
grid_sizer.Add(wx.StaticText(self, label="Filename:"), (4, 0))
grid_sizer.Add(self.path, (4, 1))
grid_sizer.Add(wx.StaticText(self, label="Line:"), (5, 0))
grid_sizer.Add(self.line, (5, 1))
sizer.Add(grid_sizer, 0, wx.LEFT | wx.RIGHT | wx.EXPAND, border=25)
self.ctx_title = wx.StaticText(self, label="")
self.ctx_title.SetFont(wx.Font(10, wx.SWISS, wx.NORMAL, wx.BOLD))
sizer.Add(self.ctx_title, 0, wx.ALL | wx.EXPAND, border=10)
self.ctx_grid_sizer = wx.GridBagSizer(vgap=3, hgap=10)
sizer.Add(self.ctx_grid_sizer, 0, wx.LEFT | wx.RIGHT | wx.EXPAND, border=25)
self.SetSizer(wx.BoxSizer(wx.VERTICAL))
self.GetSizer().Add(sizer, 0, wx.EXPAND)
self.Fit()
# hide everything initially
self.GetSizer().Hide(sizer, recursive=True)
def SetNode(self, node, ctx):
"""update the panel with values from a node"""
self.Freeze()
try:
self.title.SetLabel(node.short_name)
self.nodetype.SetLabel(node.node_type or "?")
self.modulename.SetLabel(node.modulename or "?")
self.path.SetLabel("?")
self.line.SetLabel("?")
categories = ", ".join([str(x) for x in node.categories if x])
self.categories.SetLabel(categories)
# get the type of the current value
valuetype = None
if node.has_value(ctx) and not node.is_dirty(ctx):
value = ctx[node]
valuetype = repr(type(value))
if hasattr(value, "__class__"):
valuetype = getattr(value.__class__, "__name__", repr(value.__class__))
self.valuetype.SetLabel(valuetype or "?")
try:
# get the module and line number using inspect
path = inspect.getsourcefile(node.func)
self.path.SetLabel(path)
source, line = inspect.findsource(node.func)
self.line.SetLabel(str(line) if line > 0 else "?")
except (IOError, TypeError, AttributeError):
# if that fails try getting the module name from the node
try:
if node.modulename:
module = __import__(node.modulename)
self.path.SetLabel(module.__file__)
except (ImportError, AttributeError):
pass
# show the shift set if there is one
shift_set = ctx.get_shift_set()
self.ctx_grid_sizer.Clear(True)
if shift_set:
self.ctx_title.SetLabel("ctx shift set:")
for i, n in enumerate(sorted(shift_set.iterkeys(), key=lambda x: x.short_name)):
# convert the shift value to some sensible looking string
shift_value = shift_set[n]
if isinstance(shift_value, basestring):
shift_value = repr(shift_value)
if isinstance(shift_value, MDFNode):
shift_value = shift_value.short_name
if isinstance(shift_value, float):
shift_value = "%.9f" % shift_value
shift_value = str(shift_value)
if "\n" in shift_value:
shift_value = shift_value.split("\n", 1)[0] + "..."
if len(shift_value) > 100:
shift_value = shift_value[:100] + "..."
# add a row to the grid
self.ctx_grid_sizer.Add(wx.StaticText(self, label=n.short_name), (i, 0))
self.ctx_grid_sizer.Add(wx.StaticText(self, label="="), (i, 1))
self.ctx_grid_sizer.Add(wx.StaticText(self, label=str(shift_value)), (i, 2))
else:
self.ctx_title.SetLabel("")
self.GetSizer().Show(self.sizer, recursive=True)
self.GetSizer().Layout()
finally:
self.Thaw()
| {
"content_hash": "fec032fc0b1df90a23ea759743cad58a",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 96,
"avg_line_length": 39.851063829787236,
"alnum_prop": 0.5408435664709023,
"repo_name": "tonyroberts/mdf",
"id": "0ac2215c5c60f0e7cb5d8ebdc1b711d4a7609d6c",
"size": "5619",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "mdf/viewer/panels/propspanel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "527023"
}
],
"symlink_target": ""
} |
import dropbox
import datetime
def getPresent():
#obtain and convert the server time to year, month, date
current_time = datetime.datetime.now()
    current_year = current_time.year
    current_month = current_time.month
    current_day = current_time.day
present = datetime.datetime(current_year, current_month, current_day)
return present
def getTimeDifferenceInSeconds(present, past):
d = present - past
return d.total_seconds()
def getDeletionApproval(timeDifferenceInSeconds):
    # 1,209,600 seconds in 14 days (60 x 60 x 24 x 14)
    # 864,000 seconds in 10 days (60 x 60 x 24 x 10)
    # files older than 10 days qualify for deletion
    return timeDifferenceInSeconds > 864000
def deleteTheFile(pathToFile):
client.file_delete(pathToFile)
#get the current date
present = getPresent()
#access the Dropbox account
client = dropbox.client.DropboxClient("INSERT_API_KEY")
#get a dict of all files in the specific dropbox api folder
folder_metadata = client.metadata('/')
#load the contents dict (actual list of files within the app folder) into a list variable
md = folder_metadata["contents"]
#check that we don't delete files in the even that the files have stopped being placed in Dropbox by the back up script (want at least 5 files on hand)
#TODO perhaps find the difference in seconds for all files and delete from oldest to newest until either all old ones are gone or there are only 5 of the newest remaining
if len(md) > 5:
#loop through the contents
for item in md:
#reset deletion approval to false
deletionApproval = False
#obtain and convert dropbox date to year, month, date
t1 = datetime.datetime.strptime(item["modified"],'%a, %d %b %Y %H:%M:%S +0000')
year = t1.year
month = t1.month
day = t1.day
past = datetime.datetime(year, month, day)
#get the time difference in seconds
timeDifferenceInSeconds = getTimeDifferenceInSeconds(present, past)
        #get approval to delete (default: file must be 10 days or older)
deletionApproval = getDeletionApproval(timeDifferenceInSeconds)
if deletionApproval == True:
deleteTheFile(item["path"])
| {
"content_hash": "9a198b42dbe0b46fe31361e8baa91423",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 170,
"avg_line_length": 42.38181818181818,
"alnum_prop": 0.6623766623766624,
"repo_name": "tpmccallum/python-cleanup-of-dropbox",
"id": "0b91a93669c1c589d0f449bdea4a0f224352d2e1",
"size": "2340",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "remove_files.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2340"
}
],
"symlink_target": ""
} |
from django.contrib.sites.models import Site
from django import forms
from event.models import Calendar, Event
class AdminAddCalendarForm(forms.ModelForm):
sites = forms.ModelMultipleChoiceField(
Site.objects.all(), initial=[Site.objects.get_current()]
)
class Meta(object):
model = Calendar
class AdminAddEventForm(forms.ModelForm):
sites = forms.ModelMultipleChoiceField(
Site.objects.all(), initial=Site.objects.all()
)
class Meta(object):
model = Event
| {
"content_hash": "e6bbf9655bee669c0e919b3b7f546198",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 64,
"avg_line_length": 23.636363636363637,
"alnum_prop": 0.7038461538461539,
"repo_name": "chrischambers/django-calendartools",
"id": "a0eaf7ad8adec4a3f72c2497831e133deeb6ea50",
"size": "520",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "projects/multi_site/event/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "300064"
}
],
"symlink_target": ""
} |
from telemetry import story
from page_sets import google_pages
STARTUP_TIME_IN_SECONDS = 2
IDLE_TIME_IN_SECONDS = 100
def _CreateIdlePageClass(base_page_cls):
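  # Dynamically derives a story page class: the base class (e.g. GmailPage)
  # supplies the URL and page setup, while the override below dumps browser
  # memory before and after a long idle period so the two can be compared.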
class DerivedIdlePage(base_page_cls): # pylint: disable=no-init
def RunPageInteractions(self, action_runner):
action_runner.Wait(STARTUP_TIME_IN_SECONDS)
with action_runner.CreateInteraction('Begin'):
action_runner.tab.browser.DumpMemory()
with action_runner.CreateInteraction('Idle'):
action_runner.Wait(IDLE_TIME_IN_SECONDS)
with action_runner.CreateInteraction('End'):
action_runner.tab.browser.DumpMemory()
return DerivedIdlePage
def _CreateIdleBackgroundPageClass(base_page_cls):
class DerivedIdlePage(base_page_cls): # pylint: disable=no-init
def RunPageInteractions(self, action_runner):
action_runner.tab.browser.tabs.New()
action_runner.Wait(IDLE_TIME_IN_SECONDS)
return DerivedIdlePage
class LongRunningIdleGmailPageSet(story.StorySet):
def __init__(self):
super(LongRunningIdleGmailPageSet, self).__init__(
archive_data_file='data/long_running_idle_gmail_page.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
self.AddStory(
_CreateIdlePageClass(google_pages.GmailPage)(self))
class LongRunningIdleGmailBackgroundPageSet(story.StorySet):
def __init__(self):
# Reuse the wpr of foreground gmail.
super(LongRunningIdleGmailBackgroundPageSet, self).__init__(
archive_data_file='data/long_running_idle_gmail_page.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
self.AddStory(
_CreateIdleBackgroundPageClass(google_pages.GmailPage)(self))
| {
"content_hash": "553f4ecc89b572a8278120718bfb2910",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 69,
"avg_line_length": 37,
"alnum_prop": 0.7327327327327328,
"repo_name": "junhuac/MQUIC",
"id": "9b46f8c2c96948e5f7db9439595ff795a735977a",
"size": "1828",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "src/tools/perf/page_sets/long_running_idle_google_cases.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "ApacheConf",
"bytes": "25707"
},
{
"name": "Assembly",
"bytes": "5386"
},
{
"name": "Batchfile",
"bytes": "42909"
},
{
"name": "C",
"bytes": "1168925"
},
{
"name": "C#",
"bytes": "81308"
},
{
"name": "C++",
"bytes": "43919800"
},
{
"name": "CMake",
"bytes": "46379"
},
{
"name": "CSS",
"bytes": "19668"
},
{
"name": "Emacs Lisp",
"bytes": "32613"
},
{
"name": "Go",
"bytes": "7247"
},
{
"name": "Groff",
"bytes": "127224"
},
{
"name": "HTML",
"bytes": "2548385"
},
{
"name": "Java",
"bytes": "1332462"
},
{
"name": "JavaScript",
"bytes": "851006"
},
{
"name": "M4",
"bytes": "29823"
},
{
"name": "Makefile",
"bytes": "459525"
},
{
"name": "Objective-C",
"bytes": "120158"
},
{
"name": "Objective-C++",
"bytes": "330017"
},
{
"name": "PHP",
"bytes": "11283"
},
{
"name": "Protocol Buffer",
"bytes": "2991"
},
{
"name": "Python",
"bytes": "16872234"
},
{
"name": "R",
"bytes": "1842"
},
{
"name": "Ruby",
"bytes": "937"
},
{
"name": "Shell",
"bytes": "764509"
},
{
"name": "Swift",
"bytes": "116"
},
{
"name": "VimL",
"bytes": "12288"
},
{
"name": "nesC",
"bytes": "14779"
}
],
"symlink_target": ""
} |
import datetime
import json
import decimal
from django.http import HttpResponse
from django.db import models
from django.core import serializers
from django.db.models import query
from django.forms.models import model_to_dict
from django.conf import settings
from django.contrib.auth.decorators import login_required
def configurable_login_required(function):
def wrapper(*args, **kw):
if globals()['settings'].WEB_LOCKDOWN:
curried_page = login_required(function)
return curried_page(*args, **kw)
else:
return function(*args, **kw)
return wrapper
class jsonres(object):
def __init__(self, f):
self._f = f
def __call__(self, *args, **kwargs):
res = self._f(*args, **kwargs)
if isinstance(res, query.QuerySet):
j = serializers.serialize('json', res)
else:
j = json.dumps(res, indent=2, cls=JSONEncoder)
return HttpResponse(j, content_type='application/json')
class JSONEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
return float(o)
if isinstance(o, datetime.date):
return '%i-%02i-%02i' % (o.year, o.month, o.day)
if isinstance(o, datetime.time):
return '%i:%s' % (o.hour, o.minute)
if isinstance(o, models.Model):
return model_to_dict(o)
return super(JSONEncoder, self).default(o)
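# Hypothetical usage sketch (the view name and returned payload are
# illustrative):
#
#   @configurable_login_required
#   @jsonres
#   def stats_view(request):
#       return {'when': datetime.date.today(),
#               'value': decimal.Decimal('1.5')}
#
# jsonres serialises the dict with JSONEncoder above, so the date becomes
# "YYYY-MM-DD" and the Decimal a plain float in the JSON response.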
| {
"content_hash": "7aff093424dab44cca9501161c4017c1",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 63,
"avg_line_length": 32.17777777777778,
"alnum_prop": 0.6312154696132597,
"repo_name": "davidrenne/dr_django_tools",
"id": "05c2358e8e1f124fae31121d2d2df9ca16f538b6",
"size": "1448",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dr_django_tools/shared/django/urlutils.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "1728"
},
{
"name": "Python",
"bytes": "46657"
}
],
"symlink_target": ""
} |
'''
Error classes and redis parser function
'''
import pulsar
from .pyparser import Parser
class RedisError(pulsar.PulsarException):
'''Redis Error Base class'''
pass
class CommandError(RedisError):
pass
class ResponseError(RedisError):
pass
class InvalidResponse(RedisError):
pass
class NoScriptError(ResponseError):
pass
EXCEPTION_CLASSES = {
'ERR': ResponseError,
'NOSCRIPT': NoScriptError,
}
def response_error(response):
"Parse an error response"
response = response.split(' ')
error_code = response[0]
if error_code not in EXCEPTION_CLASSES:
error_code = 'ERR'
response = ' '.join(response[1:])
return EXCEPTION_CLASSES[error_code](response)
def PyRedisParser():
return Parser(InvalidResponse, response_error)
if pulsar.HAS_C_EXTENSIONS:
from pulsar.utils.lib import RedisParser as _RedisParser
def RedisParser():
return _RedisParser(InvalidResponse, response_error)
else: # pragma nocover
RedisParser = PyRedisParser
def redis_parser(py_redis_parser=False):
return PyRedisParser if py_redis_parser else RedisParser
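# Usage sketch, assuming a hiredis-style feed()/get() interface on the
# selected parser class (the exact method names are an assumption here):
#
#   parser_class = redis_parser()   # C parser when available, else pure Python
#   parser = parser_class()
#   parser.feed(b'+OK\r\n')
#   reply = parser.get()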
| {
"content_hash": "291e0983e1286a7d69d62e35f5fabdb8",
"timestamp": "",
"source": "github",
"line_count": 61,
"max_line_length": 60,
"avg_line_length": 18.688524590163933,
"alnum_prop": 0.7087719298245614,
"repo_name": "dejlek/pulsar",
"id": "e963d012611ac950b225d1c9b42b70a898fa3d6b",
"size": "1140",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "pulsar/apps/ds/parser.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "553"
},
{
"name": "C++",
"bytes": "1517"
},
{
"name": "CSS",
"bytes": "1302"
},
{
"name": "HTML",
"bytes": "1085"
},
{
"name": "JavaScript",
"bytes": "116"
},
{
"name": "Python",
"bytes": "1149959"
}
],
"symlink_target": ""
} |
"""
@file ion/services/sa/instrument/test/test_instrument_management_service.py
@author Ian Katz
@test ion.services.sa.instrument.instrument_management_service Unit test suite to cover all service code
"""
#from mock import Mock #, sentinel, patch
from ion.services.sa.instrument.instrument_management_service import InstrumentManagementService
from ion.services.sa.test.helpers import UnitTestGenerator
from nose.plugins.attrib import attr
from ooi.logging import log
#from pyon.core.exception import BadRequest, Conflict, Inconsistent, NotFound
import unittest
from pyon.ion.resource import RT
from pyon.util.unit_test import PyonTestCase
unittest # block pycharm inspection
@attr('UNIT', group='sa')
class TestInstrumentManagement(PyonTestCase):
def setUp(self):
self.mock_ionobj = self._create_IonObject_mock('ion.services.sa.instrument.instrument_management_service.IonObject')
#self.mock_ionobj = IonObject
mock_clients = self._create_service_mock('instrument_management')
self.instrument_mgmt_service = InstrumentManagementService()
self.instrument_mgmt_service.clients = mock_clients
# must call this manually
self.instrument_mgmt_service.on_init()
self.addCleanup(delattr, self, "instrument_mgmt_service")
self.addCleanup(delattr, self, "mock_ionobj")
#self.resource_impl_cleanup()
log.debug("setUp complete")
#def resource_impl_cleanup(self):
#pass
utg = UnitTestGenerator(TestInstrumentManagement,
InstrumentManagementService)
utg.test_all_in_one(True)
utg.add_resource_unittests(RT.InstrumentAgentInstance, "instrument_agent_instance", {})
utg.add_resource_unittests(RT.InstrumentAgent, "instrument_agent", {"driver_module": "potato"})
utg.add_resource_unittests(RT.InstrumentDevice, "instrument_device", {"serial_number": "123", "firmware_version": "x"})
utg.add_resource_unittests(RT.InstrumentModel, "instrument_model")
utg.add_resource_unittests(RT.PlatformAgentInstance, "platform_agent_instance", {})
utg.add_resource_unittests(RT.PlatformAgent, "platform_agent", {"description": "the big donut"})
utg.add_resource_unittests(RT.PlatformDevice, "platform_device", {"serial_number": "2345"})
utg.add_resource_unittests(RT.PlatformModel, "platform_model", {"description": "desc"})
utg.add_resource_unittests(RT.SensorDevice, "sensor_device", {"serial_number": "123"})
utg.add_resource_unittests(RT.SensorModel, "sensor_model") | {
"content_hash": "b0236032d223a6fe4d609466589fc911",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 124,
"avg_line_length": 38.43076923076923,
"alnum_prop": 0.7465972778222578,
"repo_name": "ooici/coi-services",
"id": "7db0e40ef5e0c47a726cdfbf64f5d68953e36658",
"size": "2521",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ion/services/sa/instrument/test/test_instrument_management_service.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "C",
"bytes": "403012"
},
{
"name": "C++",
"bytes": "251803"
},
{
"name": "CSS",
"bytes": "689"
},
{
"name": "Erlang",
"bytes": "532"
},
{
"name": "JavaScript",
"bytes": "11627"
},
{
"name": "Objective-C",
"bytes": "8918"
},
{
"name": "Python",
"bytes": "7964384"
},
{
"name": "Shell",
"bytes": "9221"
},
{
"name": "nesC",
"bytes": "57712131"
}
],
"symlink_target": ""
} |
import re
import sys
import timeit
from collections import Counter
from textwrap import wrap
from django.conf import settings
from django.db import connections
from django.utils import termcolors
from . qc_settings import QC_SETTINGS
try:
from django.utils.deprecation import MiddlewareMixin
except ImportError:
MiddlewareMixin = object
class QueryCountMiddleware(MiddlewareMixin):
"""This middleware prints the number of database queries for each http
request and response. This code is adapted from: http://goo.gl/UUKN0r.
NOTE: This middleware is predominately written in the pre-Django 1.10 style,
and uses the MiddlewareMixin for compatibility:
https://docs.djangoproject.com/en/1.11/topics/http/middleware/#upgrading-pre-django-1-10-style-middleware
"""
READ_QUERY_REGEX = re.compile("SELECT .*")
def __init__(self, *args, **kwargs):
# Call super first, so the MiddlewareMixin's __init__ does its thing.
super(QueryCountMiddleware, self).__init__(*args, **kwargs)
if settings.DEBUG:
self.request_path = None
self.stats = {"request": {}, "response": {}}
self.dbs = [c.alias for c in connections.all()]
self.queries = Counter()
self._reset_stats()
self._start_time = None
self._end_time = None
self.host = None # The HTTP_HOST pulled from the request
# colorizing methods
self.white = termcolors.make_style(opts=('bold',), fg='white')
self.red = termcolors.make_style(opts=('bold',), fg='red')
self.yellow = termcolors.make_style(opts=('bold',), fg='yellow')
self.green = termcolors.make_style(fg='green')
            # query type detection is handled by READ_QUERY_REGEX above
            # TODO: make stats classification regex more robust
self.threshold = QC_SETTINGS['THRESHOLDS']
def _reset_stats(self):
self.stats = {"request": {}, "response": {}}
for alias in self.dbs:
self.stats["request"][alias] = {'writes': 0, 'reads': 0, 'total': 0}
self.stats["response"][alias] = {'writes': 0, 'reads': 0, 'total': 0}
self.queries = Counter()
def _count_queries(self, which):
for c in connections.all():
for q in c.queries:
if not self._ignore_sql(q):
if q.get('sql') and self.READ_QUERY_REGEX.search(q['sql']) is not None:
self.stats[which][c.alias]['reads'] += 1
else:
self.stats[which][c.alias]['writes'] += 1
self.stats[which][c.alias]['total'] += 1
self.queries[q['sql']] += 1
# We'll show the worst offender; i.e. the query with the most duplicates
duplicates = self.queries.most_common(1)
if duplicates:
sql, count = duplicates[0]
self.stats[which][c.alias]['duplicates'] = count
else:
self.stats[which][c.alias]['duplicates'] = 0
def _ignore_request(self, path):
"""Check to see if we should ignore the request."""
return any([
re.match(pattern, path) for pattern in QC_SETTINGS['IGNORE_REQUEST_PATTERNS']
])
def _ignore_sql(self, query):
"""Check to see if we should ignore the sql query."""
return any([
re.search(pattern, query.get('sql')) for pattern in QC_SETTINGS['IGNORE_SQL_PATTERNS']
])
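    # Illustrative settings values (assumed shape, mirroring the QC_SETTINGS
    # keys used above) — the patterns are plain regex strings, e.g.:
    #
    #     QUERYCOUNT = {
    #         'IGNORE_REQUEST_PATTERNS': [r'^/admin/'],
    #         'IGNORE_SQL_PATTERNS': [r'silk_'],
    #     }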
def process_request(self, request):
if settings.DEBUG and not self._ignore_request(request.path):
self.host = request.META.get('HTTP_HOST', None)
self.request_path = request.path
self._start_time = timeit.default_timer()
self._count_queries("request")
def process_response(self, request, response):
if settings.DEBUG and not self._ignore_request(request.path):
self.request_path = request.path
self._end_time = timeit.default_timer()
self._count_queries("response")
# Add query count header, if enabled
if QC_SETTINGS['RESPONSE_HEADER'] is not None:
response[QC_SETTINGS['RESPONSE_HEADER']] = self._calculate_num_queries()
self.print_num_queries()
self._reset_stats()
return response
def _stats_table(self, which, path='', output=None):
if output is None:
if self.host:
host_string = 'http://{0}{1}'.format(self.host, self.request_path)
else:
host_string = self.request_path
output = self.white('\n{0}\n'.format(host_string))
output += "|------|-----------|----------|----------|----------|------------|\n"
output += "| Type | Database | Reads | Writes | Totals | Duplicates |\n"
output += "|------|-----------|----------|----------|----------|------------|\n"
for db, stats in self.stats[which].items():
if stats['total'] > 0:
line = "|{w}|{db}|{reads}|{writes}|{total}|{duplicates}|\n".format(
w=which.upper()[:4].center(6),
db=db.center(11),
reads=str(stats['reads']).center(10),
writes=str(stats['writes']).center(10),
total=str(stats['total']).center(10),
duplicates=str(stats['duplicates']).center(12)
)
output += self._colorize(line, stats['total'])
output += "|------|-----------|----------|----------|----------|------------|\n"
return output
def _duplicate_queries(self, output):
"""Appends the most common duplicate queries to the given output."""
if QC_SETTINGS['DISPLAY_DUPLICATES']:
for query, count in self.queries.most_common(QC_SETTINGS['DISPLAY_DUPLICATES']):
lines = ['\nRepeated {0} times.'.format(count)]
lines += wrap(query)
lines = "\n".join(lines) + "\n"
output += self._colorize(lines, count)
return output
def _totals(self, which):
reads = 0
writes = 0
for db, stats in self.stats[which].items():
reads += stats['reads']
writes += stats['writes']
return (reads, writes, reads + writes)
def _colorize(self, output, metric):
if metric > self.threshold['HIGH']:
output = self.red(output)
elif metric > self.threshold['MEDIUM']:
output = self.yellow(output)
else:
output = self.green(output)
return output
def print_num_queries(self):
# Request data
output = self._stats_table("request")
# Response data
output = self._stats_table("response", output=output)
# Summary of both
if self._end_time and self._start_time:
elapsed = self._end_time - self._start_time
else:
elapsed = 0
count = self._calculate_num_queries()
sum_output = 'Total queries: {0} in {1:.4f}s \n\n'.format(count, elapsed)
sum_output = self._colorize(sum_output, count)
sum_output = self._duplicate_queries(sum_output)
# runserver just prints its output to sys.stderr, so we'll do that too.
if elapsed >= self.threshold['MIN_TIME_TO_LOG'] and count >= self.threshold['MIN_QUERY_COUNT_TO_LOG']:
sys.stderr.write(output)
sys.stderr.write(sum_output)
def _calculate_num_queries(self):
"""
Calculate the total number of request and response queries.
Used for count header and count table.
"""
request_totals = self._totals("request")
response_totals = self._totals("response")
return request_totals[2] + response_totals[2] # sum total queries
| {
"content_hash": "034c8d6eff1e9b3c9218afdef365f852",
"timestamp": "",
"source": "github",
"line_count": 201,
"max_line_length": 110,
"avg_line_length": 39.527363184079604,
"alnum_prop": 0.5534298300818125,
"repo_name": "bradmontgomery/django-querycount",
"id": "1f051edab09cc8810f832faf6fb08e491b7844b7",
"size": "7945",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "querycount/middleware.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "13839"
}
],
"symlink_target": ""
} |
from .notify import Slackify | {
"content_hash": "2421915de0ca51e145105e0261f17a45",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 28,
"avg_line_length": 28,
"alnum_prop": 0.8571428571428571,
"repo_name": "datasnakes/Datasnakes-Scripts",
"id": "912f5d114b0bc4cb044f9ae4ea772cab4b479cf1",
"size": "28",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "OrthoEvol/Tools/slackify/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1170"
},
{
"name": "HTML",
"bytes": "126123"
},
{
"name": "JavaScript",
"bytes": "181413"
},
{
"name": "PLSQL",
"bytes": "172401"
},
{
"name": "PLpgSQL",
"bytes": "48883"
},
{
"name": "Perl",
"bytes": "296439"
},
{
"name": "Perl 6",
"bytes": "1177"
},
{
"name": "Python",
"bytes": "279137"
},
{
"name": "SQLPL",
"bytes": "35289"
},
{
"name": "Shell",
"bytes": "5378"
},
{
"name": "TeX",
"bytes": "18176"
}
],
"symlink_target": ""
} |
'''
A sample user authentication module
Latest version can be found at https://github.com/dakside/pydemo
References:
Python documentation:
https://docs.python.org/
Python unittest
https://docs.python.org/3/library/unittest.html
--
argparse module:
https://docs.python.org/3/howto/argparse.html
PEP 257 - Python Docstring Conventions:
https://www.python.org/dev/peps/pep-0257/
@author: Le Tuan Anh <[email protected]>
'''
# Copyright (c) 2017, Le Tuan Anh <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uberapp.data import usernames
# ------------------------------------------------------------------------------
# Configuration
# ------------------------------------------------------------------------------
user_db = {n: n[::-1] for n in usernames}
# ------------------------------------------------------------------------------
# Functions
# ------------------------------------------------------------------------------
def authenticate(username, password):
if username not in user_db:
return False
else:
return password == user_db[username]
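# Example (illustrative only): with the reversed-name password scheme used to
# build user_db above, and assuming 'anna' appears in uberapp.data.usernames:
#
#     authenticate('anna', 'anna'[::-1])  # -> True
#     authenticate('anna', 'wrong')       # -> False
#     authenticate('nobody', 'x')         # -> False (unknown user)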
| {
"content_hash": "15adccc15f724ee0a64a4f9d7cf56471",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 80,
"avg_line_length": 38.1578947368421,
"alnum_prop": 0.6271264367816092,
"repo_name": "letuananh/pydemo",
"id": "c9f6902221f6f0488653e8c363dece4cf62aa548",
"size": "2223",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "project_template/uberapp/userlib/userauth.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "57876"
},
{
"name": "Shell",
"bytes": "284"
}
],
"symlink_target": ""
} |
"""Add volume_connectors table
Revision ID: daa1ba02d98
Revises: c14cef6dfedf
Create Date: 2015-11-26 17:19:22.074989
"""
# revision identifiers, used by Alembic.
revision = 'daa1ba02d98'
down_revision = 'bcdd431ba0bf'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table('volume_connectors',
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('uuid', sa.String(length=36), nullable=True),
sa.Column('node_id', sa.Integer(), nullable=True),
sa.Column('type', sa.String(length=32), nullable=True),
sa.Column('connector_id', sa.String(length=255),
nullable=True),
sa.Column('extra', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['node_id'], ['nodes.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('type', 'connector_id',
name='uniq_volumeconnectors0type0'
'connector_id'),
sa.UniqueConstraint('uuid',
name='uniq_volumeconnectors0uuid'),
mysql_charset='utf8',
mysql_engine='InnoDB')
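# How a revision like this is applied (illustrative, standard Alembic usage
# rather than anything specific to this file; Ironic normally drives these
# migrations through its own db sync tooling):
#
#     alembic upgrade daa1ba02d98    # or: alembic upgrade head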
| {
"content_hash": "f0cc2ed7e0beec0c20a9bf9468d61747",
"timestamp": "",
"source": "github",
"line_count": 36,
"max_line_length": 75,
"avg_line_length": 41.22222222222222,
"alnum_prop": 0.5175202156334232,
"repo_name": "jiazichenzhan/Server_Manage_Plugin",
"id": "90e1b01ace6088e8967acd6473ee2c8b5cb24b5d",
"size": "2057",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "ironic-plugin-pike/ironic/db/sqlalchemy/alembic/versions/daa1ba02d98_add_volume_connectors_table.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "349"
},
{
"name": "Python",
"bytes": "5720362"
},
{
"name": "Ruby",
"bytes": "986"
},
{
"name": "Shell",
"bytes": "128352"
}
],
"symlink_target": ""
} |
from azure.identity import DefaultAzureCredential
from azure.mgmt.logz import MicrosoftLogz
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-logz
# USAGE
python sub_account_tag_rules_get.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = MicrosoftLogz(
credential=DefaultAzureCredential(),
subscription_id="00000000-0000-0000-0000-000000000000",
)
response = client.sub_account_tag_rules.get(
resource_group_name="myResourceGroup",
monitor_name="myMonitor",
sub_account_name="SubAccount1",
rule_set_name="default",
)
print(response)
# x-ms-original-file: specification/logz/resource-manager/Microsoft.Logz/stable/2020-10-01/examples/SubAccountTagRules_Get.json
if __name__ == "__main__":
main()
| {
"content_hash": "27cd245902d61997ddd887bd4e722e99",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 127,
"avg_line_length": 32.714285714285715,
"alnum_prop": 0.7161572052401747,
"repo_name": "Azure/azure-sdk-for-python",
"id": "3cd986c802b467423e56ec8015ac7b13c8218019",
"size": "1613",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "sdk/logz/azure-mgmt-logz/generated_samples/sub_account_tag_rules_get.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1224"
},
{
"name": "Bicep",
"bytes": "24196"
},
{
"name": "CSS",
"bytes": "6089"
},
{
"name": "Dockerfile",
"bytes": "4892"
},
{
"name": "HTML",
"bytes": "12058"
},
{
"name": "JavaScript",
"bytes": "8137"
},
{
"name": "Jinja",
"bytes": "10377"
},
{
"name": "Jupyter Notebook",
"bytes": "272022"
},
{
"name": "PowerShell",
"bytes": "518535"
},
{
"name": "Python",
"bytes": "715484989"
},
{
"name": "Shell",
"bytes": "3631"
}
],
"symlink_target": ""
} |
import pandas as pd
from argparse import ArgumentParser
def main():
parser = ArgumentParser(description="Split sequencing_summary.txt using barcoding_summary.txt")
parser.add_argument("sequencing_summary",
help="sequencing_summary.txt from guppy, can be compressed.")
parser.add_argument("barcoding_summary",
help="barcoding_summary.txt from guppy, can be compressed.")
args = parser.parse_args()
bc = pd.read_csv(args.barcoding_summary, sep="\t", usecols=['read_id', 'barcode_arrangement'])
df = pd.read_csv(args.sequencing_summary, sep="\t")
for barc in bc["barcode_arrangement"].unique():
df[df["read_id"].isin(bc.loc[bc["barcode_arrangement"] == barc, 'read_id'])] \
.to_csv("sequencing_summary_{}.txt".format(barc), sep="\t", index=False)
if __name__ == '__main__':
main()
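# Example invocation (mirrors the argparse definition above):
#
#     python split_summary_on_barcodes.py sequencing_summary.txt barcoding_summary.txt
#
# which writes one sequencing_summary_<barcode>.txt per barcode found.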
| {
"content_hash": "335bf952edac6a9263140ecc2f1de690",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 99,
"avg_line_length": 44.1,
"alnum_prop": 0.6405895691609977,
"repo_name": "wdecoster/NanoPlot",
"id": "ca412521c0bc43b70c35e4ed47befbe2e4e92e57",
"size": "882",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/split_summary_on_barcodes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "85744"
},
{
"name": "Shell",
"bytes": "1688"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from django.conf import settings
# When True ThumbnailNode.render can raise errors
THUMBNAIL_DEBUG = False
# Backend
THUMBNAIL_BACKEND = 'sorl.thumbnail.base.ThumbnailBackend'
# Key-value store, ships with:
# sorl.thumbnail.kvstores.cached_db_kvstore.KVStore
# sorl.thumbnail.kvstores.redis_kvstore.KVStore
# Redis requires some more work, see docs
THUMBNAIL_KVSTORE = 'sorl.thumbnail.kvstores.cached_db_kvstore.KVStore'
# Change this to something else for MSSQL
THUMBNAIL_KEY_DBCOLUMN = 'key'
# Engine, ships with:
# sorl.thumbnail.engines.convert_engine.Engine
# sorl.thumbnail.engines.pil_engine.Engine
# sorl.thumbnail.engines.pgmagick_engine.Engine
# convert is preferred but requires imagemagick or graphicsmagick, see docs
THUMBNAIL_ENGINE = 'sorl.thumbnail.engines.pil_engine.Engine'
# Path to Imagemagick or Graphicsmagick ``convert`` and ``identify``.
THUMBNAIL_CONVERT = 'convert'
THUMBNAIL_IDENTIFY = 'identify'
# Storage for the generated thumbnails
THUMBNAIL_STORAGE = settings.DEFAULT_FILE_STORAGE
# Redis settings
THUMBNAIL_REDIS_DB = 0
THUMBNAIL_REDIS_PASSWORD = ''
THUMBNAIL_REDIS_HOST = 'localhost'
THUMBNAIL_REDIS_PORT = 6379
THUMBNAIL_REDIS_UNIX_SOCKET_PATH = None
# DBM settings
THUMBNAIL_DBM_FILE = "thumbnail_kvstore"
THUMBNAIL_DBM_MODE = 0o644
# Cache timeout for ``cached_db`` store. You should probably keep this at
# maximum, or ``0`` if your caching backend can handle that as infinite.
THUMBNAIL_CACHE_TIMEOUT = 3600 * 24 * 365 * 10 # 10 years
# The cache configuration to use for storing thumbnail data
THUMBNAIL_CACHE = 'default'
# Key prefix used by the key value store
THUMBNAIL_KEY_PREFIX = 'sorl-thumbnail'
# Thumbnail filename prefix
THUMBNAIL_PREFIX = 'cache/'
# Image format, common formats are: JPEG, PNG
# Make sure the backend can handle the format you specify
THUMBNAIL_FORMAT = 'JPEG'
THUMBNAIL_PRESERVE_FORMAT = False
# Colorspace, backends are required to implement: RGB, GRAY
# Setting this to None will keep the original colorspace.
THUMBNAIL_COLORSPACE = 'RGB'
# Should we upscale images by default
THUMBNAIL_UPSCALE = True
# Quality, 0-100
THUMBNAIL_QUALITY = 95
# Gaussian blur radius
THUMBNAIL_BLUR = 0
# Adds padding around the image to match the requested size without cropping
THUMBNAIL_PADDING = False
THUMBNAIL_PADDING_COLOR = '#ffffff'
# Save as progressive when saving as jpeg
THUMBNAIL_PROGRESSIVE = True
# Orientate the thumbnail with respect to source EXIF orientation tag
THUMBNAIL_ORIENTATION = True
# This means sorl.thumbnail will generate and serve a generated dummy image
# regardless of the thumbnail source content
THUMBNAIL_DUMMY = False
# Thumbnail dummy (placeholder) source. Some you might try are:
# http://placekitten.com/%(width)s/%(height)s
# http://placekitten.com/g/%(width)s/%(height)s
# http://placehold.it/%(width)sx%(height)s
THUMBNAIL_DUMMY_SOURCE = 'http://dummyimage.com/%(width)sx%(height)s'
# Sets the source image ratio for dummy generation of images with only width
# or height given
THUMBNAIL_DUMMY_RATIO = 1.5
# Enables creation of multiple-resolution (aka "Retina") images.
# We don't create retina images by default to optimize performance.
THUMBNAIL_ALTERNATIVE_RESOLUTIONS = []
# Lazy fill empty thumbnail like THUMBNAIL_DUMMY
THUMBNAIL_LAZY_FILL_EMPTY = False
# Timeout, in seconds, to use when retrieving images with urllib2
THUMBNAIL_URL_TIMEOUT = None
# Default width when using filters for texts
THUMBNAIL_FILTER_WIDTH = 500
# Should we flatten images by default (fixes a lot of transparency issues with
# imagemagick)
THUMBNAIL_FLATTEN = False
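# Any of the defaults above can be overridden from a project's Django
# settings under these same THUMBNAIL_* names (sorl-thumbnail's settings
# wrapper prefers the project's values). For example (illustrative):
#
#     # settings.py
#     THUMBNAIL_FORMAT = 'PNG'
#     THUMBNAIL_QUALITY = 85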
| {
"content_hash": "25fa47e4f16a2c6f849c66173d97b8ad",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 78,
"avg_line_length": 31.443478260869565,
"alnum_prop": 0.7743362831858407,
"repo_name": "edisonlz/fruit",
"id": "83637e93d2b945fd362d4f11786d587366bb189c",
"size": "3616",
"binary": false,
"copies": "16",
"ref": "refs/heads/master",
"path": "web_project/base/site-packages/sorl/thumbnail/conf/defaults.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ApacheConf",
"bytes": "1482"
},
{
"name": "Batchfile",
"bytes": "6714"
},
{
"name": "C",
"bytes": "3085"
},
{
"name": "C++",
"bytes": "4823"
},
{
"name": "CSS",
"bytes": "660927"
},
{
"name": "DIGITAL Command Language",
"bytes": "27853"
},
{
"name": "GAP",
"bytes": "6045"
},
{
"name": "Go",
"bytes": "13616"
},
{
"name": "Groff",
"bytes": "7199"
},
{
"name": "HTML",
"bytes": "7678961"
},
{
"name": "Java",
"bytes": "208173"
},
{
"name": "JavaScript",
"bytes": "2626051"
},
{
"name": "Makefile",
"bytes": "16810"
},
{
"name": "Nginx",
"bytes": "19215"
},
{
"name": "PHP",
"bytes": "205978"
},
{
"name": "Perl",
"bytes": "27627"
},
{
"name": "Python",
"bytes": "15609476"
},
{
"name": "Shell",
"bytes": "13663"
},
{
"name": "TeX",
"bytes": "60714"
}
],
"symlink_target": ""
} |
"""Automatically fix simple style guide violations."""
__author__ = '[email protected] (Robert Walker)'
import sys
import os
closure_path = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'))
gflags_path = os.path.abspath(os.path.join(closure_path, 'gflags'))
sys.path += [ closure_path, gflags_path ]
import gflags as flags
from closure_linter import error_fixer
from closure_linter import runner
from closure_linter.common import simplefileflags as fileflags
FLAGS = flags.FLAGS
flags.DEFINE_list('additional_extensions', None, 'List of additional file '
'extensions (not js) that should be treated as '
'JavaScript files.')
def main(argv=None):
"""Main function.
Args:
argv: Sequence of command line arguments.
"""
if argv is None:
argv = flags.FLAGS(sys.argv)
suffixes = ['.js']
if FLAGS.additional_extensions:
suffixes += ['.%s' % ext for ext in FLAGS.additional_extensions]
files = fileflags.GetFileList(argv, 'JavaScript', suffixes)
fixer = error_fixer.ErrorFixer()
# Check the list of files.
for filename in files:
runner.Run(filename, fixer)
if __name__ == '__main__':
main()
| {
"content_hash": "ac4000149411ac62c157bb53bf1dc1d5",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 94,
"avg_line_length": 25.574468085106382,
"alnum_prop": 0.6821963394342762,
"repo_name": "asankah/closure-linter",
"id": "806f9dcca4571843737e4ee5774b27176f80f704",
"size": "1840",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "closure_linter/fixjsstyle.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "JavaScript",
"bytes": "116105"
},
{
"name": "Python",
"bytes": "437465"
}
],
"symlink_target": ""
} |
"""
This driver connects Cinder to an installed DRBDmanage instance, see
http://oss.linbit.com/drbdmanage/
http://git.linbit.com/drbdmanage.git/
for more details.
"""
import six
import socket
import uuid
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import importutils
from oslo_utils import units
from cinder import exception
from cinder.i18n import _, _LW, _LI
from cinder.volume import driver
try:
import dbus
import drbdmanage.consts as dm_const
import drbdmanage.exceptions as dm_exc
import drbdmanage.utils as dm_utils
except ImportError:
dbus = None
dm_const = None
dm_exc = None
dm_utils = None
LOG = logging.getLogger(__name__)
drbd_opts = [
cfg.StrOpt('drbdmanage_redundancy',
default='1',
help='Number of nodes that should replicate the data.'),
cfg.BoolOpt('drbdmanage_devs_on_controller',
default=True,
                help='''If set, the c-vol node will receive a usable
                /dev/drbdX device, even if the actual data is stored on
other nodes only.
This is useful for debugging, maintenance, and to be
able to do the iSCSI export from the c-vol node.''')
# TODO(PM): offsite_redundancy?
# TODO(PM): choose DRBDmanage storage pool?
]
CONF = cfg.CONF
CONF.register_opts(drbd_opts)
AUX_PROP_CINDER_VOL_ID = "cinder-id"
DM_VN_PREFIX = 'CV_' # sadly 2CV isn't allowed by DRBDmanage
DM_SN_PREFIX = 'SN_'
class DrbdManageDriver(driver.VolumeDriver):
"""Cinder driver that uses DRBDmanage for storage."""
VERSION = '1.0.0'
drbdmanage_dbus_name = 'org.drbd.drbdmanaged'
drbdmanage_dbus_interface = '/interface'
def __init__(self, *args, **kwargs):
self.empty_list = dbus.Array([], signature="a(s)")
self.empty_dict = dbus.Array([], signature="a(ss)")
super(DrbdManageDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(drbd_opts)
if not self.drbdmanage_dbus_name:
self.drbdmanage_dbus_name = 'org.drbd.drbdmanaged'
if not self.drbdmanage_dbus_interface:
self.drbdmanage_dbus_interface = '/interface'
self.drbdmanage_redundancy = int(getattr(self.configuration,
'drbdmanage_redundancy', 1))
self.drbdmanage_devs_on_controller = bool(
getattr(self.configuration,
'drbdmanage_devs_on_controller',
True))
self.dm_control_vol = ".drbdctrl"
# Copied from the LVM driver, see
# I43190d1dac33748fe55fa00f260f32ab209be656
target_driver = self.target_mapping[
self.configuration.safe_get('iscsi_helper')]
LOG.debug('Attempting to initialize DRBD driver with the '
'following target_driver: %s',
target_driver)
self.target_driver = importutils.import_object(
target_driver,
configuration=self.configuration,
db=self.db,
executor=self._execute)
def dbus_connect(self):
self.odm = dbus.SystemBus().get_object(self.drbdmanage_dbus_name,
self.drbdmanage_dbus_interface)
self.odm.ping()
def call_or_reconnect(self, fn, *args):
"""Call DBUS function; on a disconnect try once to reconnect."""
try:
return fn(*args)
except dbus.DBusException as e:
LOG.warning(_LW("Got disconnected; trying to reconnect. (%s)"), e)
self.dbus_connect()
# Old function object is invalid, get new one.
return getattr(self.odm, fn._method_name)(*args)
def do_setup(self, context):
"""Any initialization the volume driver does while starting."""
super(DrbdManageDriver, self).do_setup(context)
self.dbus_connect()
def check_for_setup_error(self):
"""Verify that requirements are in place to use DRBDmanage driver."""
if not all((dbus, dm_exc, dm_const, dm_utils)):
msg = _('DRBDmanage driver setup error: some required '
'libraries (dbus, drbdmanage.*) not found.')
LOG.error(msg)
raise exception.VolumeDriverException(message=msg)
if self.odm.ping() != 0:
message = _('Cannot ping DRBDmanage backend')
raise exception.VolumeBackendAPIException(data=message)
def _clean_uuid(self):
"""Returns a UUID string, WITHOUT braces."""
# Some uuid library versions put braces around the result!?
# We don't want them, just a plain [0-9a-f-]+ string.
id = str(uuid.uuid4())
id = id.replace("{", "")
id = id.replace("}", "")
return id
def _check_result(self, res, ignore=None, ret=0):
seen_success = False
seen_error = False
result = ret
for (code, fmt, arg_l) in res:
# convert from DBUS to Python
arg = dict(arg_l)
if ignore and code in ignore:
if not result:
result = code
continue
if code == dm_exc.DM_SUCCESS:
seen_success = True
continue
seen_error = _("Received error string: %s") % (fmt % arg)
if seen_error:
raise exception.VolumeBackendAPIException(data=seen_error)
if seen_success:
return ret
# by default okay - or the ignored error code.
return ret
# DRBDmanage works in kiB units; Cinder uses GiB.
def _vol_size_to_dm(self, size):
return int(size * units.Gi / units.Ki)
def _vol_size_to_cinder(self, size):
return int(size * units.Ki / units.Gi)
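    # Worked example (comment only): units.Gi == 2**30 and units.Ki == 2**10,
    # so a 5 GiB Cinder volume maps to 5 * 2**20 == 5242880 kiB in DRBDmanage,
    # and _vol_size_to_cinder(5242880) == 5 again.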
def is_clean_volume_name(self, name, prefix):
try:
if (name.startswith(CONF.volume_name_template % "") and
uuid.UUID(name[7:]) is not None):
return prefix + name[7:]
except ValueError:
return None
try:
if uuid.UUID(name) is not None:
return prefix + name
except ValueError:
return None
def _priv_hash_from_volume(self, volume):
return dm_utils.dict_to_aux_props({
AUX_PROP_CINDER_VOL_ID: volume['id'],
})
def snapshot_name_from_cinder_snapshot(self, snapshot):
sn_name = self.is_clean_volume_name(snapshot['id'], DM_SN_PREFIX)
return sn_name
def _res_and_vl_data_for_volume(self, volume, empty_ok=False):
"""Find DRBD resource and volume ID.
A DRBD resource might consist of several "volumes"
(think consistency groups).
So we have to find the number of the volume within one resource.
Returns resource name, volume number, and resource
and volume properties.
"""
# If we get a string, use it as-is.
# Else it's a dictionary; then get the ID.
if isinstance(volume, six.string_types):
v_uuid = volume
else:
v_uuid = volume['id']
res, rl = self.call_or_reconnect(self.odm.list_volumes,
self.empty_dict,
0,
dm_utils.dict_to_aux_props(
{AUX_PROP_CINDER_VOL_ID: v_uuid}),
self.empty_dict)
self._check_result(res)
if (not rl) or (len(rl) == 0):
if empty_ok:
LOG.debug("No volume %s found.", v_uuid)
return None, None, None, None
raise exception.VolumeBackendAPIException(
data=_("volume %s not found in drbdmanage") % v_uuid)
if len(rl) > 1:
raise exception.VolumeBackendAPIException(
data=_("multiple resources with name %s found by drbdmanage") %
v_uuid)
(r_name, r_props, vols) = rl[0]
if len(vols) != 1:
raise exception.VolumeBackendAPIException(
data=_("not exactly one volume with id %s") %
v_uuid)
(v_nr, v_props) = vols[0]
LOG.debug("volume %(uuid)s is %(res)s/%(nr)d; %(rprop)s, %(vprop)s",
{'uuid': v_uuid, 'res': r_name, 'nr': v_nr,
'rprop': r_props, 'vprop': v_props})
return r_name, v_nr, r_props, v_props
def _resource_and_snap_data_from_snapshot(self, snapshot, empty_ok=False):
"""Find DRBD resource and snapshot name from the snapshot ID."""
s_uuid = snapshot['id']
res, rs = self.call_or_reconnect(self.odm.list_snapshots,
self.empty_dict,
self.empty_dict,
0,
dm_utils.dict_to_aux_props(
{AUX_PROP_CINDER_VOL_ID: s_uuid}),
self.empty_dict)
self._check_result(res)
if (not rs) or (len(rs) == 0):
if empty_ok:
return None
else:
raise exception.VolumeBackendAPIException(
data=_("no snapshot with id %s found in drbdmanage") %
s_uuid)
if len(rs) > 1:
raise exception.VolumeBackendAPIException(
data=_("multiple resources with snapshot ID %s found") %
s_uuid)
(r_name, snaps) = rs[0]
if len(snaps) != 1:
raise exception.VolumeBackendAPIException(
data=_("not exactly one snapshot with id %s") % s_uuid)
(s_name, s_props) = snaps[0]
LOG.debug("snapshot %(uuid)s is %(res)s/%(snap)s",
{'uuid': s_uuid, 'res': r_name, 'snap': s_name})
return r_name, s_name, s_props
def _resource_name_volnr_for_volume(self, volume, empty_ok=False):
res, vol, _, _ = self._res_and_vl_data_for_volume(volume, empty_ok)
return res, vol
def local_path(self, volume):
dres, dvol = self._resource_name_volnr_for_volume(volume)
res, data = self.call_or_reconnect(self.odm.text_query,
[dm_const.TQ_GET_PATH,
dres,
str(dvol)])
self._check_result(res)
if len(data) == 1:
return data[0]
message = _('Got bad path information from DRBDmanage! (%s)') % data
raise exception.VolumeBackendAPIException(data=message)
def create_volume(self, volume):
"""Creates a DRBD resource.
We address it later on via the ID that gets stored
as a private property.
"""
# TODO(PM): consistency groups
dres = self.is_clean_volume_name(volume['id'], DM_VN_PREFIX)
res = self.call_or_reconnect(self.odm.create_resource,
dres,
self.empty_dict)
self._check_result(res, ignore=[dm_exc.DM_EEXIST], ret=None)
        # If we get DM_EEXIST, then the volume already exists, e.g. because
# deploy gave an error on a previous try (like ENOSPC).
# Still, there might or might not be the volume in the resource -
# we have to check that explicitly.
(_, drbd_vol) = self._resource_name_volnr_for_volume(volume,
empty_ok=True)
if not drbd_vol:
props = self._priv_hash_from_volume(volume)
# TODO(PM): properties - redundancy, etc
res = self.call_or_reconnect(self.odm.create_volume,
dres,
self._vol_size_to_dm(volume['size']),
props)
self._check_result(res)
# If we crashed between create_volume and the deploy call,
# the volume might be defined but not exist on any server. Oh my.
res = self.call_or_reconnect(self.odm.auto_deploy,
dres, self.drbdmanage_redundancy,
0, True)
self._check_result(res)
if self.drbdmanage_devs_on_controller:
# FIXME: Consistency groups, vol#
res = self.call_or_reconnect(self.odm.assign,
socket.gethostname(),
dres,
self.empty_dict)
self._check_result(res, ignore=[dm_exc.DM_EEXIST])
return 0
def delete_volume(self, volume):
"""Deletes a resource."""
dres, dvol = self._resource_name_volnr_for_volume(
volume,
empty_ok=True)
if not dres:
# OK, already gone.
return True
# TODO(PM): check if in use? Ask whether Primary, or just check result?
res = self.call_or_reconnect(self.odm.remove_volume, dres, dvol, False)
self._check_result(res, ignore=[dm_exc.DM_ENOENT])
res, rl = self.call_or_reconnect(self.odm.list_volumes,
[dres],
0,
self.empty_dict,
self.empty_list)
self._check_result(res)
# We expect the _resource_ to be here still (we just got a volnr from
# it!), so just query the volumes.
# If the resource has no volumes anymore, the current DRBDmanage
        # version (erroneously, IMO) returns no *resource*, too.
if len(rl) > 1:
message = _('DRBDmanage expected one resource ("%(res)s"), '
'got %(n)d') % {'res': dres, 'n': len(rl)}
raise exception.VolumeBackendAPIException(data=message)
# Delete resource, if empty
if (not rl) or (not rl[0]) or (len(rl[0][2]) == 0):
res = self.call_or_reconnect(self.odm.remove_resource, dres, False)
self._check_result(res, ignore=[dm_exc.DM_ENOENT])
def create_volume_from_snapshot(self, volume, snapshot):
"""Creates a volume from a snapshot."""
LOG.debug("create vol from snap: from %(snap)s make %(vol)s",
{'snap': snapshot['id'], 'vol': volume['id']})
# TODO(PM): Consistency groups.
dres, sname, sprop = self._resource_and_snap_data_from_snapshot(
snapshot)
new_res = self.is_clean_volume_name(volume['id'], DM_VN_PREFIX)
r_props = self.empty_dict
# TODO(PM): consistency groups => different volume number possible
v_props = [(0, self._priv_hash_from_volume(volume))]
res = self.call_or_reconnect(self.odm.restore_snapshot,
new_res,
dres,
sname,
r_props,
v_props)
return self._check_result(res, ignore=[dm_exc.DM_ENOENT])
def create_cloned_volume(self, volume, src_vref):
temp_id = self._clean_uuid()
snapshot = {'id': temp_id}
self.create_snapshot({'id': temp_id, 'volume_id': src_vref['id']})
self.create_volume_from_snapshot(volume, snapshot)
self.delete_snapshot(snapshot)
def _update_volume_stats(self):
data = {}
data["vendor_name"] = 'Open Source'
data["driver_version"] = self.VERSION
data["storage_protocol"] = self.target_driver.protocol
# This has to match the name set in the cinder volume driver spec,
# so keep it lowercase
data["volume_backend_name"] = "drbdmanage"
data["pools"] = []
res, free, total = self.call_or_reconnect(self.odm.cluster_free_query,
self.drbdmanage_redundancy)
self._check_result(res)
location_info = ('DrbdManageDriver:%(cvol)s:%(dbus)s' %
{'cvol': self.dm_control_vol,
'dbus': self.drbdmanage_dbus_name})
# TODO(PM): multiple DRBDmanage instances and/or multiple pools
single_pool = {}
single_pool.update(dict(
pool_name=data["volume_backend_name"],
free_capacity_gb=self._vol_size_to_cinder(free),
total_capacity_gb=self._vol_size_to_cinder(total),
reserved_percentage=self.configuration.reserved_percentage,
location_info=location_info,
QoS_support=False))
data["pools"].append(single_pool)
self._stats = data
def get_volume_stats(self, refresh=True):
"""Get volume status."""
self._update_volume_stats()
return self._stats
def extend_volume(self, volume, new_size):
dres, dvol = self._resource_name_volnr_for_volume(volume)
res = self.call_or_reconnect(self.odm.resize_volume,
dres, dvol, -1,
{"size": self._vol_size_to_dm(new_size)},
0)
self._check_result(res)
return 0
def create_snapshot(self, snapshot):
"""Creates a snapshot."""
sn_name = self.snapshot_name_from_cinder_snapshot(snapshot)
dres, dvol = self._resource_name_volnr_for_volume(
snapshot["volume_id"])
res, data = self.call_or_reconnect(self.odm.list_assignments,
self.empty_dict,
[dres],
0,
self.empty_dict,
self.empty_dict)
self._check_result(res)
nodes = [d[0] for d in data]
if len(nodes) < 1:
raise exception.VolumeBackendAPIException(
_('Snapshot res "%s" that is not deployed anywhere?') %
(dres))
props = self._priv_hash_from_volume(snapshot)
res = self.call_or_reconnect(self.odm.create_snapshot,
dres, sn_name, nodes, props)
self._check_result(res)
def delete_snapshot(self, snapshot):
"""Deletes a snapshot."""
dres, sname, _ = self._resource_and_snap_data_from_snapshot(
snapshot, empty_ok=True)
if not dres:
# resource already gone?
LOG.warning(_LW("snapshot: %s not found, "
"skipping delete operation"), snapshot['id'])
LOG.info(_LI('Successfully deleted snapshot: %s'), snapshot['id'])
return True
res = self.call_or_reconnect(self.odm.remove_snapshot,
dres, sname, True)
return self._check_result(res, ignore=[dm_exc.DM_ENOENT])
# ####### Interface methods for DataPath (Target Driver) ########
def ensure_export(self, context, volume):
volume_path = self.local_path(volume)
return self.target_driver.ensure_export(
context,
volume,
volume_path)
def create_export(self, context, volume, connector):
volume_path = self.local_path(volume)
export_info = self.target_driver.create_export(
context,
volume,
volume_path)
return {'provider_location': export_info['location'],
'provider_auth': export_info['auth'], }
def remove_export(self, context, volume):
return self.target_driver.remove_export(context, volume)
def initialize_connection(self, volume, connector):
return self.target_driver.initialize_connection(volume, connector)
def validate_connector(self, connector):
return self.target_driver.validate_connector(connector)
def terminate_connection(self, volume, connector, **kwargs):
return None
| {
"content_hash": "2b873051d3925abf165a15b55edf1ef3",
"timestamp": "",
"source": "github",
"line_count": 534,
"max_line_length": 79,
"avg_line_length": 37.92696629213483,
"alnum_prop": 0.5363156075643115,
"repo_name": "scality/cinder",
"id": "464d8a5cc39ab5323a51e356ab9e4065a28d899b",
"size": "20898",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "cinder/volume/drivers/drbdmanagedrv.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "13431518"
},
{
"name": "Shell",
"bytes": "8222"
}
],
"symlink_target": ""
} |
from setuptools import setup, find_packages
requirements = ['flask', 'sqlalchemy>=0.7.4', 'sqlalchemy-migrate', 'gevent',
'python-daemon', 'pychef']
excludes = ['test_runner.py', 'tests', 'tests.*']
setup(name='python-opencenter',
version='1.0.0',
description='OpenCenter Orchestration server',
author='rcbops',
author_email='[email protected]',
url='https://github.com/rcbops/opencenter',
license='Apache',
classifiers=['Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
                   'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
                   'Operating System :: OS Independent',
'Programming Language :: Python',
],
include_package_data=True,
packages=find_packages(exclude=excludes),
install_requires=requirements,
entry_points={'console_scripts': ['opencenter = opencenter:main']},
)
| {
"content_hash": "a4282862d890937bf53056986b036901",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 77,
"avg_line_length": 40.7037037037037,
"alnum_prop": 0.5887170154686078,
"repo_name": "rcbops/opencenter",
"id": "981a30b90421b02e5b2e4bbaacbea1226210cadc",
"size": "2407",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "439541"
},
{
"name": "Shell",
"bytes": "5134"
}
],
"symlink_target": ""
} |
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import ListView, CreateView, UpdateView, DeleteView
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, render_to_response, redirect
from django.views.generic.detail import DetailView
from django.contrib.sites.models import Site
from forms import *
from models import Pago
from sermepa.forms import SermepaPaymentForm
from sermepa.models import SermepaIdTPV
import logging
log = logging.getLogger("MatriculaEIDE")
class pagos_lista(ListView):
model = Pago
template_name="pagosonline/pago_list.html"
class crear_pago_manual(CreateView):
#model = Pago
form_class = PagoForm
template_name="pagosonline/pago_manual_crear.html"
class editar_pago_manual(UpdateView):
model = Pago
#form_class = PagoForm
template_name="pago_manual_editar.html"
fields = '__all__'
class borrar_pago_manual(DeleteView):
model = Pago
success_url ="/pagos/lista"
#form_class = PagoForm
template_name="pago_manual_borrar.html"
def pagar_manual(request,pago_id):
site = Site.objects.get_current()
site_domain = site.domain
pago = Pago.objects.get(id=pago_id)
merchant_parameters = {
"Ds_Merchant_Titular": 'John Doe',
"Ds_Merchant_MerchantData": 'man-%s' % pago.id, # id del Pedido o Carrito, para identificarlo en el mensaje de vuelta
"Ds_Merchant_MerchantName": settings.SERMEPA_COMERCIO,
"Ds_Merchant_ProductDescription": 'eide-onlinepayment-%s' % pago.id,
"Ds_Merchant_Amount": int(pago.importe * 100),
"Ds_Merchant_Terminal": settings.SERMEPA_TERMINAL,
"Ds_Merchant_MerchantCode": settings.SERMEPA_MERCHANT_CODE,
"Ds_Merchant_Currency": settings.SERMEPA_CURRENCY,
"Ds_Merchant_MerchantURL": settings.SERMEPA_URL_DATA,
"Ds_Merchant_UrlOK": "http://%s%s" % (site_domain, reverse('pago_ok')),
"Ds_Merchant_UrlKO": "http://%s%s" % (site_domain, reverse('pago_ko')),
# "Ds_Merchant_Order": SermepaIdTPV.objects.new_idtpv(),
"Ds_Merchant_TransactionType": '0',
}
    order = SermepaIdTPV.objects.new_idtpv()  # must be a unique number every time
    print "We have the order ", order
merchant_parameters.update({
"Ds_Merchant_Order": order,
"Ds_Merchant_TransactionType": '0',
})
form = SermepaPaymentForm(merchant_parameters=merchant_parameters)
print "Tenemos el form"
print form.render()
return render_to_response('pagosonline/pago_manual_pagar.html', context={'form': form, 'debug': settings.DEBUG, 'pago': pago})
class PagoManual(DetailView):
template_name = "pagosonline/pago_manual_pagar.html"
model = Pago
def get_context_data(self, **kwargs):
context = super(PagoManual, self).get_context_data(**kwargs)
context['pago']=self.object
site = Site.objects.get_current()
site_domain = site.domain
pago = self.object
merchant_parameters = {
"Ds_Merchant_Titular": 'John Doe',
"Ds_Merchant_MerchantData": 'man-%s' % pago.id, # id del Pedido o Carrito, para identificarlo en el mensaje de vuelta
"Ds_Merchant_MerchantName": settings.SERMEPA_COMERCIO,
"Ds_Merchant_ProductDescription": 'eide-onlinepayment-%s' % pago.id,
"Ds_Merchant_Amount": int(pago.importe * 100),
"Ds_Merchant_Terminal": settings.SERMEPA_TERMINAL,
"Ds_Merchant_MerchantCode": settings.SERMEPA_MERCHANT_CODE,
"Ds_Merchant_Currency": settings.SERMEPA_CURRENCY,
"Ds_Merchant_MerchantURL": settings.SERMEPA_URL_DATA,
"Ds_Merchant_UrlOK": "http://%s%s" % (site_domain, reverse('pago_ok')),
"Ds_Merchant_UrlKO": "http://%s%s" % (site_domain, reverse('pago_ko')),
# "Ds_Merchant_Order": SermepaIdTPV.objects.new_idtpv(),
"Ds_Merchant_TransactionType": '0',
}
        order = SermepaIdTPV.objects.new_idtpv()  # must be a unique number every time
        print "We have the order ", order
merchant_parameters.update({
"Ds_Merchant_Order": order,
"Ds_Merchant_TransactionType": '0',
})
form = SermepaPaymentForm(merchant_parameters=merchant_parameters)
print "Tenemos el form"
print form.render()
context['form']=form
return context
def make_payment(request, reference, order_id):
""" Recibimos un texto de referencia, el ID de la orden y una cantidad en euros (sin decimales)"""
return direct_to_template(request,
template= "pagosonline/pago.html",
extra_context={"payament_info": payament_info(reference, order_id)})
@csrf_exempt
def confirm_payment(request):
    ## FIXME: some filtering/validation should be applied to the payment confirmation.
    log.debug("Received a payment confirmation")
    log.debug(request.POST)
    try:
        # Read the operation number, which holds the reference to the registration
        log.debug("Going to read Num_operacion to see what we are about to confirm")
        reference = request.POST["Num_operacion"]
        log.debug("we have the reference: %s"%reference)
        registration_type = reference.split('-')[0]
        registration_id = reference.split('-')[1]
        log.debug("we have a %s registration with id %s"%(registration_type, registration_id))
r = None
        # Look up the registration
        if registration_type=="cambridge":
            log.debug("It's Cambridge; looking it up in the DB")
            r = Registration.objects.get(id=registration_id)
        elif registration_type=="manual":
            log.debug("Going to confirm a manual payment. Looking it up in the DB...")
            r = Pago.objects.get(id=registration_id)
            log.debug("We found the manual payment %s"%r.id)
        else:
            log.debug("We don't know what kind of registration this is!")
        # Check whether we found a registration
        if r:
            log.debug("We have the registration/payment, going to mark it as paid")
            r.set_as_paid()
            log.debug("Show the payment-OK page to the TPV")
return direct_to_template(request,template="pago_confirmar.html")
else:
return direct_to_template(request,template="pago_noconfirmar.html")
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
log.debug("No hemos sido capaces de validar el pago de la matricula ha fallado el try con la excepcion: %s %s %s"%(exc_type,exc_value,exc_traceback))
log.debug(exc_type)
log.debug(exc_value)
log.debug(exc_traceback)
return direct_to_template(request,template="pago_noconfirmar.html")
| {
"content_hash": "57b48aad54a023d0d70a4327a0c2f412",
"timestamp": "",
"source": "github",
"line_count": 158,
"max_line_length": 157,
"avg_line_length": 42.71518987341772,
"alnum_prop": 0.6578752407764114,
"repo_name": "jonlatorre/MatriculaEIDE",
"id": "b7c77b52f58f9d2974fd457adcf5942215da0015",
"size": "6782",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pagosonline/views.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "1067"
},
{
"name": "HTML",
"bytes": "137504"
},
{
"name": "JavaScript",
"bytes": "204190"
},
{
"name": "Python",
"bytes": "195294"
},
{
"name": "Shell",
"bytes": "1504"
}
],
"symlink_target": ""
} |
import glob
import os
from PyInstaller.compat import is_win
from PyInstaller.hooks.hookutils import exec_statement
hiddenimports = ['gmodule', 'gobject']
def hook(mod):
statement = """
import os
import gst
reg = gst.registry_get_default()
plug = reg.find_plugin('coreelements')
pth = plug.get_filename()
print os.path.dirname(pth)
"""
plugin_path = exec_statement(statement)
if is_win:
# TODO Verify that on Windows gst plugins really end with .dll.
pattern = os.path.join(plugin_path, '*.dll')
else:
# Even on OSX plugins end with '.so'.
pattern = os.path.join(plugin_path, '*.so')
for f in glob.glob(pattern):
# 'f' contains absolute path.
mod.binaries.append((os.path.join('gst_plugins', os.path.basename(f)),
f, 'BINARY'))
return mod
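# Note (illustrative): each appended tuple follows PyInstaller's TOC entry
# format, (dest_name, source_path, typecode), so every collected plugin ends
# up bundled under a gst_plugins/ directory next to the frozen executable.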
| {
"content_hash": "f4cf5993a2863e74ccd0956bd8743365",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 78,
"avg_line_length": 25.242424242424242,
"alnum_prop": 0.6470588235294118,
"repo_name": "bl4ckdu5t/registron",
"id": "c84f47b09f6a4003779e2386e9a258c11bc7d11a",
"size": "1413",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "PyInstaller/hooks/hook-gst._gst.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "884174"
},
{
"name": "C++",
"bytes": "1272"
},
{
"name": "CSS",
"bytes": "3410"
},
{
"name": "Objective-C",
"bytes": "30562"
},
{
"name": "Perl",
"bytes": "4306"
},
{
"name": "Prolog",
"bytes": "640"
},
{
"name": "Python",
"bytes": "1753967"
},
{
"name": "Shell",
"bytes": "4016"
},
{
"name": "TeX",
"bytes": "186440"
},
{
"name": "Visual Basic",
"bytes": "166"
}
],
"symlink_target": ""
} |
import mock
import pytest
import requests
from icontrol import __version__ as VERSION
from icontrol import session
UA = 'f5-icontrol-rest-python/%s' % VERSION
@pytest.fixture()
def iCRS():
fake_iCRS = session.iControlRESTSession('admin', 'admin')
fake_iCRS.session = mock.MagicMock()
req = requests.PreparedRequest()
req.prepare(method='post', url='https://0.0.0.0/mgmt/tm/root/RESTiface/')
req.body = '{"foo": "bar"}'
fake_iCRS.session.prepare_request.return_value = req
mock_response = mock.MagicMock()
mock_response.status_code = 200
fake_iCRS.session.send.return_value = mock_response
return fake_iCRS
@pytest.fixture()
def iCRSBytes():
fake_iCRS = session.iControlRESTSession('admin', 'admin')
fake_iCRS.session = mock.MagicMock()
req = requests.PreparedRequest()
req.prepare(method='post', url='https://0.0.0.0/mgmt/tm/root/RESTiface/')
req.body = b'{"foo": "bar"}'
fake_iCRS.session.prepare_request.return_value = req
mock_response = mock.MagicMock()
mock_response.status_code = 200
fake_iCRS.session.send.return_value = mock_response
return fake_iCRS
@pytest.fixture()
def uparts():
parts_dict = {'base_uri': 'https://0.0.0.0/mgmt/tm/root/RESTiface/',
'partition': 'BIGCUSTOMER',
'name': 'foobar1',
'sub_path': '',
'suffix': '/members/m1',
'transform_name': False,
'transform_subpath': False}
return parts_dict
@pytest.fixture()
def transform_name():
parts_dict = {'base_uri': 'https://0.0.0.0/mgmt/tm/root/RESTiface/',
'partition': 'BIGCUSTOMER',
'name': 'foobar1: 1.1.1.1/24 bar1: /Common/DC1',
'sub_path': '',
'suffix': '/members/m1',
'transform_name': True,
'transform_subpath': False}
return parts_dict
@pytest.fixture()
def uparts_with_subpath():
parts_dict = {'base_uri': 'https://0.0.0.0/mgmt/tm/root/RESTiface/',
'partition': 'BIGCUSTOMER',
'name': 'foobar1',
'sub_path': 'sp',
'suffix': '/members/m1',
'transform_name': False,
'transform_subpath': False}
return parts_dict
@pytest.fixture()
def transform_name_w_subpath():
parts_dict = {'base_uri': 'https://0.0.0.0/mgmt/tm/root/RESTiface/',
'partition': 'BIGCUSTOMER',
'name': 'foobar1: 1.1.1.1/24 bar1: /Common/DC1',
'sub_path': 'ltm:/sp',
'suffix': '/members/m1',
'transform_name': True,
'transform_subpath': True}
return parts_dict
@pytest.fixture()
def uparts_shared():
parts_dict = {'base_uri': 'https://0.0.0.0/mgmt/shared/root/RESTiface/',
'partition': 'BIGCUSTOMER',
'name': 'foobar1',
'sub_path': '',
'suffix': '/members/m1'}
return parts_dict
@pytest.fixture()
def uparts_cm():
parts_dict = {'base_uri': 'https://0.0.0.0/mgmt/cm/root/RESTiface/',
'partition': 'BIGCUSTOMER',
'name': 'foobar1',
'sub_path': '',
'suffix': '/members/m1'}
return parts_dict
# Test invalid args
def test_iCRS_with_invalid_construction():
with pytest.raises(TypeError) as UTE:
session.iControlRESTSession('admin', 'admin', what='foble')
assert str(UTE.value) == "Unexpected **kwargs: {'what': 'foble'}"
# Test uri component validation
def test_incorrect_uri_construction_bad_scheme(uparts):
uparts['base_uri'] = 'hryttps://0.0.0.0/mgmt/tm/root/RESTiface/'
with pytest.raises(session.InvalidScheme) as IS:
session.generate_bigip_uri(**uparts)
assert str(IS.value) == 'hryttps'
def test_incorrect_uri_construction_bad_mgmt_path(uparts):
uparts['base_uri'] = 'https://0.0.0.0/magmt/tm/root/RESTiface'
with pytest.raises(session.InvalidBigIP_ICRURI) as IR:
session.generate_bigip_uri(**uparts)
assert "But it's: '/magmt/tm/root/RESTiface'" in str(IR.value)
def test_incorrect_uri_construction_bad_base_nonslash_last(uparts):
uparts['base_uri'] = 'https://0.0.0.0/mgmt/tm/root/RESTiface'
with pytest.raises(session.InvalidPrefixCollection) as IR:
session.generate_bigip_uri(**uparts)
test_value = "prefix_collections path element must end with '/', but" +\
" it's: root/RESTiface"
assert str(IR.value) == test_value
def test_incorrect_uri_construction_illegal_slash_partition_char(uparts):
uparts['partition'] = 'spam/ham'
with pytest.raises(session.InvalidInstanceNameOrFolder) as II:
session.generate_bigip_uri(**uparts)
test_value = "instance names and partitions cannot contain '/', but" +\
" it's: %s" % uparts['partition']
assert str(II.value) == test_value
def test_incorrect_uri_construction_illegal_tilde_partition_char(uparts):
uparts['partition'] = 'spam~ham'
with pytest.raises(session.InvalidInstanceNameOrFolder) as II:
session.generate_bigip_uri(**uparts)
test_value = "instance names and partitions cannot contain '~', but" +\
" it's: %s" % uparts['partition']
assert str(II.value) == test_value
def test_incorrect_uri_construction_illegal_suffix_nonslash_first(uparts):
uparts['suffix'] = 'ham'
with pytest.raises(session.InvalidSuffixCollection) as II:
session.generate_bigip_uri(**uparts)
test_value = "suffix_collections path element must start with '/', but " +\
"it's: %s" % uparts['suffix']
assert str(II.value) == test_value
def test_incorrect_uri_construction_illegal_suffix_slash_last(uparts):
uparts['suffix'] = '/ham/'
with pytest.raises(session.InvalidSuffixCollection) as II:
session.generate_bigip_uri(**uparts)
test_value = "suffix_collections path element must not end with '/', " +\
"but it's: %s" % uparts['suffix']
assert str(II.value) == test_value
# Test uri construction
def test_correct_uri_construction_partitionless(uparts):
uparts['partition'] = ''
uri = session.generate_bigip_uri(**uparts)
assert uri == 'https://0.0.0.0/mgmt/tm/root/RESTiface/foobar1/members/m1'
def test_correct_uri_construction_partitionless_subpath(uparts_with_subpath):
uparts_with_subpath['partition'] = ''
with pytest.raises(session.InvalidURIComponentPart) as IC:
session.generate_bigip_uri(**uparts_with_subpath)
assert str(IC.value) == \
'When giving the subPath component include partition as well.'
def test_correct_uri_construction_nameless(uparts):
uparts['name'] = ''
uri = session.generate_bigip_uri(**uparts)
assert uri ==\
"https://0.0.0.0/mgmt/tm/root/RESTiface/~BIGCUSTOMER/members/m1"
def test_correct_uri_construction_nameless_subpath(uparts_with_subpath):
uparts_with_subpath['name'] = ''
uri = session.generate_bigip_uri(**uparts_with_subpath)
assert uri ==\
"https://0.0.0.0/mgmt/tm/root/RESTiface/~BIGCUSTOMER~sp/members/m1"
def test_correct_uri_construction_partitionless_and_nameless(uparts):
uparts['partition'] = ''
uparts['name'] = ''
uri = session.generate_bigip_uri(**uparts)
assert uri == "https://0.0.0.0/mgmt/tm/root/RESTiface/members/m1"
def test_correct_uri_construction_partitionless_and_nameless_subpath(
uparts_with_subpath):
uparts_with_subpath['partition'] = ''
uparts_with_subpath['name'] = ''
with pytest.raises(session.InvalidURIComponentPart) as IC:
session.generate_bigip_uri(**uparts_with_subpath)
assert str(IC.value) == \
'When giving the subPath component include partition as well.'
def test_correct_uri_construction_partition_name_and_suffixless(uparts):
uparts['partition'] = ''
uparts['name'] = ''
uparts['suffix'] = ''
uri = session.generate_bigip_uri(**uparts)
assert uri == "https://0.0.0.0/mgmt/tm/root/RESTiface/"
def test_correct_uri_construction_partition_name_and_suffixless_subpath(
uparts_with_subpath):
uparts_with_subpath['partition'] = ''
uparts_with_subpath['name'] = ''
uparts_with_subpath['suffix'] = ''
with pytest.raises(session.InvalidURIComponentPart) as IC:
session.generate_bigip_uri(**uparts_with_subpath)
assert str(IC.value) == \
'When giving the subPath component include partition as well.'
def test_correct_uri_construction_partitionless_and_suffixless(uparts):
uparts['partition'] = ''
uparts['suffix'] = ''
uri = session.generate_bigip_uri(**uparts)
assert uri == 'https://0.0.0.0/mgmt/tm/root/RESTiface/foobar1'
def test_correct_uri_construction_partitionless_and_suffixless_subpath(
uparts_with_subpath):
uparts_with_subpath['partition'] = ''
uparts_with_subpath['suffix'] = ''
with pytest.raises(session.InvalidURIComponentPart) as IC:
session.generate_bigip_uri(**uparts_with_subpath)
assert str(IC.value) == \
'When giving the subPath component include partition as well.'
def test_correct_uri_construction_nameless_and_suffixless(uparts):
uparts['name'] = ''
uparts['suffix'] = ''
uri = session.generate_bigip_uri(**uparts)
assert uri == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~BIGCUSTOMER'
def test_correct_uri_construction_nameless_and_suffixless_subpath(
uparts_with_subpath):
uparts_with_subpath['name'] = ''
uparts_with_subpath['suffix'] = ''
uri = session.generate_bigip_uri(**uparts_with_subpath)
assert uri == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~BIGCUSTOMER~sp'
def test_correct_uri_construction_partitionless_transform_name(transform_name):
transform_name['partition'] = ''
uri = session.generate_bigip_uri(**transform_name)
assert uri == \
'https://0.0.0.0/mgmt/tm/root/RESTiface/foobar1: ' \
'1.1.1.1~24 bar1: ~Common~DC1/members/m1'
def test_correct_uri_transformed_partitionless_subpath(
transform_name_w_subpath):
transform_name_w_subpath['partition'] = ''
with pytest.raises(session.InvalidURIComponentPart) as IC:
session.generate_bigip_uri(**transform_name_w_subpath)
assert str(IC.value) == \
'When giving the subPath component include partition as well.'
def test_correct_uri_transformed_nameless(transform_name):
transform_name['name'] = ''
uri = session.generate_bigip_uri(**transform_name)
assert uri ==\
"https://0.0.0.0/mgmt/tm/root/RESTiface/~BIGCUSTOMER/members/m1"
def test_correct_uri_transformed_nameless_subpath(transform_name_w_subpath):
transform_name_w_subpath['name'] = ''
uri = session.generate_bigip_uri(**transform_name_w_subpath)
assert uri ==\
"https://0.0.0.0/mgmt/tm/root/RESTiface/~BIGCUSTOMER~ltm:~sp/members/m1"
def test_correct_uri_transformed_partitionless_and_nameless(transform_name):
transform_name['partition'] = ''
transform_name['name'] = ''
uri = session.generate_bigip_uri(**transform_name)
assert uri == "https://0.0.0.0/mgmt/tm/root/RESTiface/members/m1"
def test_correct_uri_transformed_partitionless_and_nameless_subpath(
transform_name_w_subpath):
transform_name_w_subpath['partition'] = ''
transform_name_w_subpath['name'] = ''
with pytest.raises(session.InvalidURIComponentPart) as IC:
session.generate_bigip_uri(**transform_name_w_subpath)
assert str(IC.value) == \
'When giving the subPath component include partition as well.'
def test_correct_uri_transformed_partition_name_and_suffixless(transform_name):
transform_name['partition'] = ''
transform_name['name'] = ''
transform_name['suffix'] = ''
uri = session.generate_bigip_uri(**transform_name)
assert uri == "https://0.0.0.0/mgmt/tm/root/RESTiface/"
def test_correct_uri_transformed_partition_name_and_suffixless_subpath(
transform_name_w_subpath):
transform_name_w_subpath['partition'] = ''
transform_name_w_subpath['name'] = ''
transform_name_w_subpath['suffix'] = ''
with pytest.raises(session.InvalidURIComponentPart) as IC:
session.generate_bigip_uri(**transform_name_w_subpath)
assert str(IC.value) == \
'When giving the subPath component include partition as well.'
def test_correct_uri_transformed_partitionless_and_suffixless(transform_name):
transform_name['partition'] = ''
transform_name['suffix'] = ''
uri = session.generate_bigip_uri(**transform_name)
assert uri == \
'https://0.0.0.0/mgmt/tm/root/RESTiface/foobar1: ' \
'1.1.1.1~24 bar1: ~Common~DC1'
def test_correct_uri_transformed_partitionless_and_suffixless_subpath(
transform_name_w_subpath):
transform_name_w_subpath['partition'] = ''
transform_name_w_subpath['suffix'] = ''
with pytest.raises(session.InvalidURIComponentPart) as IC:
session.generate_bigip_uri(**transform_name_w_subpath)
assert str(IC.value) == \
'When giving the subPath component include partition as well.'
def test_correct_uri_transformed_nameless_and_suffixless(transform_name):
transform_name['name'] = ''
transform_name['suffix'] = ''
uri = session.generate_bigip_uri(**transform_name)
assert uri == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~BIGCUSTOMER'
def test_correct_uri_transformed_nameless_and_suffixless_subpath(
transform_name_w_subpath):
transform_name_w_subpath['name'] = ''
transform_name_w_subpath['suffix'] = ''
uri = session.generate_bigip_uri(**transform_name_w_subpath)
assert uri == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~BIGCUSTOMER~ltm:~sp'
def test_correct_uri_construction_mgmt_shared(uparts_shared):
uparts_shared['name'] = ''
uparts_shared['suffix'] = ''
uri = session.generate_bigip_uri(**uparts_shared)
assert uri == 'https://0.0.0.0/mgmt/shared/root/RESTiface/~BIGCUSTOMER'
def test_correct_uri_construction_mgmt_cm(uparts_cm):
uparts_cm['name'] = ''
uparts_cm['suffix'] = ''
uri = session.generate_bigip_uri(**uparts_cm)
assert uri == 'https://0.0.0.0/mgmt/cm/root/RESTiface/~BIGCUSTOMER'
# Test exception handling
def test_wrapped_delete_success(iCRS, uparts):
iCRS.delete(uparts['base_uri'], partition='AFN', name='AIN', uri_as_parts=True)
assert isinstance(iCRS.session.prepare_request.call_args[0][0], requests.Request)
assert iCRS.session.prepare_request.call_args[0][0].url == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~AFN~AIN'
def test_wrapped_delete_207_fail(iCRS, uparts):
iCRS.session.send.return_value.status_code = 207
with pytest.raises(session.iControlUnexpectedHTTPError) as ex:
iCRS.delete(uparts['base_uri'], partition='A_FOLDER_NAME', name='AN_INSTANCE_NAME')
assert str(ex.value).startswith('207 Unexpected Error: ')
def test_wrapped_get_success(iCRS, uparts):
iCRS.get(uparts['base_uri'], partition='AFN', name='AIN', uri_as_parts=True)
assert isinstance(iCRS.session.prepare_request.call_args[0][0], requests.Request)
assert iCRS.session.prepare_request.call_args[0][0].url == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~AFN~AIN'
def test_wrapped_get_success_with_suffix(iCRS, uparts):
iCRS.get(uparts['base_uri'], partition='AFN', name='AIN', suffix=uparts['suffix'], uri_as_parts=True)
assert isinstance(iCRS.session.prepare_request.call_args[0][0], requests.Request)
assert iCRS.session.prepare_request.call_args[0][0].url == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~AFN~AIN/members/m1'
def test_wrapped_get_207_fail(iCRS, uparts):
iCRS.session.send.return_value.status_code = 207
with pytest.raises(session.iControlUnexpectedHTTPError) as ex:
iCRS.get(uparts['base_uri'], partition='A_FOLDER_NAME', name='AN_INSTANCE_NAME')
assert str(ex.value).startswith('207 Unexpected Error: ')
def test_wrapped_patch_success(iCRS, uparts):
iCRS.patch(uparts['base_uri'], partition='AFN', name='AIN', uri_as_parts=True)
assert isinstance(iCRS.session.prepare_request.call_args[0][0], requests.Request)
assert iCRS.session.prepare_request.call_args[0][0].url == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~AFN~AIN'
assert iCRS.session.prepare_request.call_args[0][0].data == []
def test_wrapped_patch_207_fail(iCRS, uparts):
iCRS.session.send.return_value.status_code = 207
with pytest.raises(session.iControlUnexpectedHTTPError) as ex:
iCRS.patch(uparts['base_uri'], partition='A_FOLDER_NAME', name='AN_INSTANCE_NAME')
assert str(ex.value).startswith('207 Unexpected Error: ')
def test_wrapped_put_207_fail(iCRS, uparts):
iCRS.session.send.return_value.status_code = 207
with pytest.raises(session.iControlUnexpectedHTTPError) as ex:
iCRS.put(uparts['base_uri'], partition='A_FOLDER_NAME', name='AN_INSTANCE_NAME')
assert str(ex.value).startswith('207 Unexpected Error: ')
def test_wrapped_post_207_fail(iCRS, uparts):
iCRS.session.send.return_value.status_code = 207
with pytest.raises(session.iControlUnexpectedHTTPError) as ex:
iCRS.post(uparts['base_uri'], partition='A_FOLDER_NAME', name='AN_INSTANCE_NAME')
assert str(ex.value).startswith('207 Unexpected Error: ')
def test_wrapped_post_success(iCRS, uparts):
iCRS.post(uparts['base_uri'], partition='AFN', name='AIN', uri_as_parts=True)
assert isinstance(iCRS.session.prepare_request.call_args[0][0], requests.Request)
assert iCRS.session.prepare_request.call_args[0][0].url == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~AFN~AIN'
assert iCRS.session.prepare_request.call_args[0][0].data == []
assert iCRS.session.prepare_request.call_args[0][0].json is None
def test_wrapped_post_success_with_data(iCRS, uparts):
iCRS.post(uparts['base_uri'], partition='AFN', name='AIN', data={'a': 1}, uri_as_parts=True)
assert isinstance(iCRS.session.prepare_request.call_args[0][0], requests.Request)
assert iCRS.session.prepare_request.call_args[0][0].url == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~AFN~AIN'
assert iCRS.session.prepare_request.call_args[0][0].data == {'a': 1}
assert iCRS.session.prepare_request.call_args[0][0].json is None
def test_wrapped_post_success_with_json(iCRS, uparts):
iCRS.post(uparts['base_uri'], partition='AFN', name='AIN', json='{"a": 1}', uri_as_parts=True)
assert isinstance(iCRS.session.prepare_request.call_args[0][0], requests.Request)
assert iCRS.session.prepare_request.call_args[0][0].url == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~AFN~AIN'
assert iCRS.session.prepare_request.call_args[0][0].data == []
assert iCRS.session.prepare_request.call_args[0][0].json == '{"a": 1}'
def test_wrapped_post_success_with_json_and_data(iCRS, uparts):
iCRS.post(uparts['base_uri'], partition='AFN', name='AIN', data={'a': 1}, json='{"a": 1}', uri_as_parts=True)
assert isinstance(iCRS.session.prepare_request.call_args[0][0], requests.Request)
assert iCRS.session.prepare_request.call_args[0][0].url == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~AFN~AIN'
assert iCRS.session.prepare_request.call_args[0][0].data == {'a': 1}
assert iCRS.session.prepare_request.call_args[0][0].json == '{"a": 1}'
def test_wrapped_post_success_with_json_and_data_bytestring(iCRSBytes, uparts):
iCRSBytes.post(uparts['base_uri'], partition='AFN', name='AIN', data={'a': 1}, json='{"a": 1}', uri_as_parts=True)
assert isinstance(iCRSBytes.session.prepare_request.call_args[0][0], requests.Request)
assert iCRSBytes.session.prepare_request.call_args[0][0].url == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~AFN~AIN'
assert iCRSBytes.session.prepare_request.call_args[0][0].data == {'a': 1}
assert iCRSBytes.session.prepare_request.call_args[0][0].json == '{"a": 1}'
def test_wrapped_put_success(iCRS, uparts):
iCRS.put(uparts['base_uri'], partition='AFN', name='AIN', uri_as_parts=True)
assert isinstance(iCRS.session.prepare_request.call_args[0][0], requests.Request)
assert iCRS.session.prepare_request.call_args[0][0].url == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~AFN~AIN'
assert iCRS.session.prepare_request.call_args[0][0].data == []
def test_wrapped_put_success_with_data(iCRS, uparts):
iCRS.put(uparts['base_uri'], partition='AFN', name='AIN', data={'b': 2}, uri_as_parts=True)
assert isinstance(iCRS.session.prepare_request.call_args[0][0], requests.Request)
assert iCRS.session.prepare_request.call_args[0][0].url == 'https://0.0.0.0/mgmt/tm/root/RESTiface/~AFN~AIN'
assert iCRS.session.prepare_request.call_args[0][0].data == {'b': 2}
def test___init__user_agent():
icrs = session.iControlRESTSession('admin', 'admin')
assert UA in icrs.session.headers['User-Agent']
def test__append_user_agent():
icrs = session.iControlRESTSession('admin', 'admin')
icrs.append_user_agent('test-user-agent/1.1.1')
assert icrs.session.headers['User-Agent'].endswith('test-user-agent/1.1.1')
assert UA in icrs.session.headers['User-Agent']
def test_append_user_agent_empty_start():
icrs = session.iControlRESTSession('admin', 'admin')
icrs.session.headers['User-Agent'] = ''
icrs.append_user_agent('test-agent')
assert icrs.session.headers['User-Agent'] == 'test-agent'
def test___init__with_additional_user_agent():
icrs = session.iControlRESTSession(
'admin',
'admin',
user_agent='test-agent/1.2.3'
)
assert icrs.session.headers['User-Agent'].endswith('test-agent/1.2.3')
assert 'f5-icontrol-rest-python' in icrs.session.headers['User-Agent']
def test__init__without_verify():
icrs = session.iControlRESTSession('test_name', 'test_pw', token=True)
assert icrs.session.verify is False
assert icrs.session.auth.verify is False
def test__init__with_verify():
icrs = session.iControlRESTSession(
'test_name', 'test_pw', token=True, verify='/path/to/cert'
)
    assert icrs.session.verify == '/path/to/cert'
    assert icrs.session.auth.verify == '/path/to/cert'
| {
"content_hash": "ba9f237f1383421d8d495b5b4ac9e712",
"timestamp": "",
"source": "github",
"line_count": 541,
"max_line_length": 123,
"avg_line_length": 40.82439926062847,
"alnum_prop": 0.6682061034139274,
"repo_name": "F5Networks/f5-icontrol-rest-python",
"id": "2ebea6a2dfcc3c3565443b6efadb421b2c102393",
"size": "22675",
"binary": false,
"copies": "1",
"ref": "refs/heads/1.0",
"path": "icontrol/test/unit/test_session.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "361"
},
{
"name": "Makefile",
"bytes": "660"
},
{
"name": "Python",
"bytes": "93524"
},
{
"name": "Shell",
"bytes": "4198"
}
],
"symlink_target": ""
} |
import os
import sys
from ctk_cli import CLIArgumentParser
# Append ITK libs
sys.path.append(os.path.join(os.environ['ITK_BUILD_DIR'],
'Wrapping/Generators/Python'))
sys.path.append(os.path.join(os.environ['ITK_BUILD_DIR'], 'lib'))
# Append TubeTK libs:
sys.path.append(os.path.join(os.environ['TUBETK_BUILD_DIR'],
                             'TubeTK-build/lib/TubeTK'))
import itk
from itk import TubeTKITK as itktube
def run(args):
PixelType = itk.UC
Dimension = 3
# Read tre file
TubeFileReaderType = itk.SpatialObjectReader[Dimension]
tubeFileReader = TubeFileReaderType.New()
tubeFileReader.SetFileName(args.inputTubeFile)
tubeFileReader.Update()
# Read template image
TemplateImageType = itk.Image[PixelType, Dimension]
TemplateImageReaderType = itk.ImageFileReader[TemplateImageType]
templateImageReader = TemplateImageReaderType.New()
templateImageReader.SetFileName(args.inputTemplateImage)
templateImageReader.Update()
# call ConvertTubesToImage
TubesToImageFilterType = itktube.ConvertTubesToImage[Dimension, PixelType]
tubesToImageFilter = TubesToImageFilterType.New()
tubesToImageFilter.SetUseRadius(args.useRadii)
tubesToImageFilter.SetTemplateImage(templateImageReader.GetOutput())
tubesToImageFilter.SetInput(tubeFileReader.GetOutput())
# write output image
TubeImageWriterType = itk.ImageFileWriter[TemplateImageType]
tubeImageWriter = TubeImageWriterType.New()
tubeImageWriter.SetInput(tubesToImageFilter.GetOutput())
tubeImageWriter.SetFileName(args.outputImageFile)
tubeImageWriter.Update()
if __name__ == "__main__":
run(CLIArgumentParser().parse_args())
| {
"content_hash": "457c6a9df6618265572b185cd8894d04",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 76,
"avg_line_length": 31.346153846153847,
"alnum_prop": 0.7766871165644171,
"repo_name": "aylward/ITKTubeTK",
"id": "3c451bea49e6351334e1baf8acadbe1bd0990f2f",
"size": "1630",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "examples/Applications/ConvertTubesToImage/Testing/ConvertTubesToImageTest.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "13419"
},
{
"name": "C++",
"bytes": "3271086"
},
{
"name": "CMake",
"bytes": "96467"
},
{
"name": "Python",
"bytes": "72225"
},
{
"name": "Shell",
"bytes": "23057"
}
],
"symlink_target": ""
} |
import os
import sys
import re
import web
import karesansui
from karesansui.lib.rest import Rest, auth
from karesansui.db.model._2pysilhouette import Job, JobGroup
from karesansui.db.access.machine import findbyhost1
from karesansui.db.access._2pysilhouette import save_job_collaboration
from karesansui.db.access.machine2jobgroup import new as m2j_new
from pysilhouette.command import dict2command
from karesansui.lib.utils import get_ifconfig_info, get_bonding_info, dict_ksort, available_virt_mechs, is_param
from karesansui.lib.const import BONDING_COMMAND_ADD, NETWORK_COMMAND_RESTART, BONDING_MODE
from karesansui.lib.checker import Checker, \
CHECK_EMPTY, CHECK_VALID
NETWORK_RESTART = 1
def validates_bonding(obj, target_regex):
checker = Checker()
check = True
_ = obj._
checker.errors = []
count = 0
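    # Count the slave devices selected via bonding_target_dev_select_ethN
    # inputs; a bond needs at least two of them to be valid.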
for input in obj.input:
m = target_regex.match(input)
if m:
count += 1
check = checker.check_netdev_name(_('Target Device Name'),
m.group('dev'),
CHECK_EMPTY | CHECK_VALID,
) and check
if count < 2:
check = False
checker.add_error(_('Not enough target devices for bonding.'))
if is_param(obj.input, 'bonding_target_dev_primary'):
check = checker.check_netdev_name(_('Primary Device Name'),
obj.input.bonding_target_dev_primary,
CHECK_EMPTY | CHECK_VALID,
) and check
else:
check = False
checker.add_error(_('"%s" is required.') %_('Primary Device Name'))
if is_param(obj.input, 'bonding_mode'):
if obj.input.bonding_mode not in BONDING_MODE:
check = False
checker.add_error(_('Unknown bonding mode.'))
else:
check = False
checker.add_error(_('"%s" is required.') %_('Bonding Mode'))
obj.view.alert = checker.errors
return check
class HostBy1NetworkSettings(Rest):
@auth
def _GET(self, *param, **params):
host_id = self.chk_hostby1(param)
if host_id is None: return web.notfound()
self.view.host_id = host_id
bond_list = get_bonding_info()
if self.is_mode_input() is True:
exist_bond_max_num = -1
exist_bond_list = get_ifconfig_info("regex:^bond")
for bond_name in exist_bond_list.keys():
try:
num = int(bond_name.replace("bond",""))
except ValueError:
continue
if exist_bond_max_num < num:
exist_bond_max_num = num
self.view.create_bond_name = "bond%s" % (exist_bond_max_num + 1)
dev_list = get_ifconfig_info("regex:^eth")
for bond in bond_list:
for slave in bond_list[bond]['slave']:
if slave in dev_list:
dev_list[slave]['bond'] = bond
#pysical_dev_list = get_ifconfig_info("regex:^peth")
pysical_dev_list = get_ifconfig_info("regex:^br")
for pysical_dev in pysical_dev_list:
if pysical_dev[1:] in dev_list:
dev_list[pysical_dev[1:]]['bridge'] = pysical_dev
self.view.bond_target_dev = dev_list
self.view.hypervisors = available_virt_mechs()
return True
dev_list = get_ifconfig_info()
for bond in bond_list:
if bond in dev_list:
dev_list[bond]['bond'] = True
for slave in bond_list[bond]['slave']:
for dev in dev_list:
if dev == slave:
dev_list[dev]['bond'] = True
self.view.current = dev_list
self.view.bond_list = bond_list
return True
@auth
def _POST(self, *param, **params):
host_id = self.chk_hostby1(param)
if host_id is None: return web.notfound()
target_regex = re.compile(r"^bonding_target_dev_select_(?P<dev>eth[0-9]+)")
if not validates_bonding(self, target_regex):
self.logger.debug("Add bonding failed. Did not validate.")
return web.badrequest(self.view.alert)
target_dev = []
for input in self.input:
m = target_regex.match(input)
if m:
target_dev.append(m.group('dev'))
primary = self.input.bonding_target_dev_primary
mode = self.input.bonding_mode
cmdname = u"Add Bonding Setting"
cmd = BONDING_COMMAND_ADD
options = {}
options['dev'] = ','.join(target_dev)
options["primary"] = primary
options["mode"] = mode
_cmd = dict2command(
"%s/%s" % (karesansui.config['application.bin.dir'], cmd), options)
_jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])
_job = Job('%s command' % cmdname, 0, _cmd)
_jobgroup.jobs.append(_job)
host = findbyhost1(self.orm, host_id)
_machine2jobgroup = m2j_new(machine=host,
jobgroup_id=-1,
uniq_key=karesansui.sheconf['env.uniqkey'],
created_user=self.me,
modified_user=self.me,
)
save_job_collaboration(self.orm,
self.pysilhouette.orm,
_machine2jobgroup,
_jobgroup,
)
return web.accepted()
@auth
def _PUT(self, *param, **params):
host_id = self.chk_hostby1(param)
if host_id is None: return web.notfound()
self.view.host_id = host_id
host = findbyhost1(self.orm, host_id)
status = int(self.input.status)
if status != NETWORK_RESTART:
return web.badrequest()
cmdname = u"Restart Network"
cmd = NETWORK_COMMAND_RESTART
options = {}
_cmd = dict2command(
"%s/%s" % (karesansui.config['application.bin.dir'], cmd), options)
_jobgroup = JobGroup(cmdname, karesansui.sheconf['env.uniqkey'])
_job = Job('%s command' % cmdname, 0, _cmd)
_jobgroup.jobs.append(_job)
host = findbyhost1(self.orm, host_id)
_machine2jobgroup = m2j_new(machine=host,
jobgroup_id=-1,
uniq_key=karesansui.sheconf['env.uniqkey'],
created_user=self.me,
modified_user=self.me,
)
save_job_collaboration(self.orm,
self.pysilhouette.orm,
_machine2jobgroup,
_jobgroup,
)
return web.accepted()
urls = (
'/host/(\d+)/networksettings/?(\.part)$', HostBy1NetworkSettings,
)
| {
"content_hash": "ee9b2d55864df8dc6c9e9fa9fe27e137",
"timestamp": "",
"source": "github",
"line_count": 210,
"max_line_length": 112,
"avg_line_length": 34.26190476190476,
"alnum_prop": 0.5205003474635164,
"repo_name": "karesansui/karesansui",
"id": "ab6a6f59f9618c75a7563f4314a2af46e8af5265",
"size": "8354",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "karesansui/gadget/hostby1networksettings.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "79865"
},
{
"name": "HTML",
"bytes": "32774"
},
{
"name": "JavaScript",
"bytes": "286445"
},
{
"name": "Makefile",
"bytes": "265"
},
{
"name": "Python",
"bytes": "2226164"
},
{
"name": "Shell",
"bytes": "18293"
}
],
"symlink_target": ""
} |
from AccessControl import ClassSecurityInfo
from Products.Archetypes.utils import DisplayList
from Products.Archetypes.Registry import registerWidget
from Products.Archetypes.Widget import TypesWidget
from Products.CMFPlone.i18nl10n import ulocalized_time
from Products.CMFCore.utils import getToolByName
from bika.lims.browser import BrowserView
from bika.lims.locales import COUNTRIES,STATES,DISTRICTS
import json
import plone
class AddressWidget(TypesWidget):
_properties = TypesWidget._properties.copy()
_properties.update({
'macro': "bika_widgets/addresswidget",
'helper_js': ("bika_widgets/addresswidget.js",),
'helper_css': ("bika_widgets/addresswidget.css",),
'showLegend': True,
'showDistrict': True,
'showCopyFrom': True,
'showCity': True,
'showPostalCode': True,
'showAddress': True,
})
security = ClassSecurityInfo()
# The values in the form/field are always
# Country Name, State Name, District Name.
def getCountries(self):
items = []
items = [(x['ISO'], x['Country']) for x in COUNTRIES]
items.sort(lambda x,y: cmp(x[1], y[1]))
return items
def getDefaultCountry(self):
portal = getToolByName(self, 'portal_url').getPortalObject()
bs = portal._getOb('bika_setup')
return bs.getDefaultCountry()
def getStates(self, country):
items = []
if not country:
return items
# get ISO code for country
iso = [c for c in COUNTRIES if c['Country'] == country or c['ISO'] == country]
if not iso:
return items
iso = iso[0]['ISO']
items = [x for x in STATES if x[0] == iso]
items.sort(lambda x,y: cmp(x[2], y[2]))
return items
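    # Lookup chain (illustrative): a country given as a name or an ISO code
    # is normalised to its ISO code, which then keys the STATES/DISTRICTS data.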
def getDistricts(self, country, state):
items = []
if not country or not state:
return items
# get ISO code for country
iso = [c for c in COUNTRIES if c['Country'] == country or c['ISO'] == country]
if not iso:
return items
iso = iso[0]['ISO']
# get NUMBER of the state for lookup
snr = [s for s in STATES if s[0] == iso and s[2] == state]
if not snr:
return items
snr = snr[0][1]
items = [x for x in DISTRICTS if x[0] == iso and x[1] == snr]
items.sort(lambda x,y: cmp(x[1], y[1]))
return items
registerWidget(AddressWidget,
title = 'Address Widget',
description = ('Simple address widget with country/state lookups'),
)
| {
"content_hash": "d9986a698a4333e4ee556448714b0735",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 86,
"avg_line_length": 34.276315789473685,
"alnum_prop": 0.6053742802303262,
"repo_name": "hocinebendou/bika.gsoc",
"id": "a67e1002c1ec0bf787f5aa05643978b85ad169d6",
"size": "2605",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "bika/lims/browser/widgets/addresswidget.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "403"
},
{
"name": "COBOL",
"bytes": "5987"
},
{
"name": "CSS",
"bytes": "29758"
},
{
"name": "JavaScript",
"bytes": "411425"
},
{
"name": "Python",
"bytes": "4330980"
},
{
"name": "RobotFramework",
"bytes": "239735"
},
{
"name": "Shell",
"bytes": "11201"
}
],
"symlink_target": ""
} |
import random
import string
def choices_from_mapping(mapping):
''' return a list of tuple (key, value) from dict '''
return list([(k, mapping[k]) for k in mapping])
def generate_random_string(length):
characters = string.letters + string.digits
return ''.join([random.choice(characters) for i in range(length)])
def reverse_key_value_dict(dictionnary):
return {value: key for key, value in dictionnary.items()}
| {
"content_hash": "1bea4b2669fd9caf432b85f3e51d28f3",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 70,
"avg_line_length": 27.3125,
"alnum_prop": 0.7025171624713958,
"repo_name": "h2020-westlife-eu/VRE",
"id": "27d1be8b01729e43f4b1eb61e1018ea5835becec",
"size": "538",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "luna_django_commons/app/helpers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "46730"
},
{
"name": "HTML",
"bytes": "167326"
},
{
"name": "JavaScript",
"bytes": "22031"
},
{
"name": "Python",
"bytes": "310068"
},
{
"name": "Shell",
"bytes": "863"
}
],
"symlink_target": ""
} |
"""Example use of the CFR algorithm on Kuhn Poker."""
from absl import app
from absl import flags
from open_spiel.python.algorithms import cfr
from open_spiel.python.algorithms import exploitability
import pyspiel
FLAGS = flags.FLAGS
flags.DEFINE_integer("iterations", 100, "Number of iterations")
flags.DEFINE_string("game", "kuhn_poker", "Name of the game")
flags.DEFINE_integer("players", 2, "Number of players")
flags.DEFINE_integer("print_freq", 10, "How often to print the exploitability")
def main(_):
game = pyspiel.load_game(FLAGS.game, {"players": FLAGS.players})
cfr_solver = cfr.CFRSolver(game)
for i in range(FLAGS.iterations):
cfr_solver.evaluate_and_update_policy()
if i % FLAGS.print_freq == 0:
conv = exploitability.exploitability(game, cfr_solver.average_policy())
print("Iteration {} exploitability {}".format(i, conv))
if __name__ == "__main__":
app.run(main)
| {
"content_hash": "13332193a401f699478a3f9887d1e712",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 79,
"avg_line_length": 30.566666666666666,
"alnum_prop": 0.7142857142857143,
"repo_name": "deepmind/open_spiel",
"id": "662b53b279734eb852180f04393f6e226d5a1fba",
"size": "1512",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "open_spiel/python/examples/cfr_example.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "6640"
},
{
"name": "C++",
"bytes": "4649139"
},
{
"name": "CMake",
"bytes": "78467"
},
{
"name": "Go",
"bytes": "18010"
},
{
"name": "Julia",
"bytes": "16727"
},
{
"name": "Jupyter Notebook",
"bytes": "148663"
},
{
"name": "Python",
"bytes": "2823600"
},
{
"name": "Rust",
"bytes": "18562"
},
{
"name": "Shell",
"bytes": "51087"
}
],
"symlink_target": ""
} |
import collections
import keyword
import math
import re
from oslo_log import log
import six
from ceilometer.i18n import _
from ceilometer import sample
from ceilometer import transformer
LOG = log.getLogger(__name__)
class ArithmeticTransformer(transformer.TransformerBase):
"""Multi meter arithmetic transformer.
Transformer that performs arithmetic operations
over one or more meters and/or their metadata.
"""
meter_name_re = re.compile(r'\$\(([\w\.\-]+)\)')
def __init__(self, target=None, **kwargs):
super(ArithmeticTransformer, self).__init__(**kwargs)
target = target or {}
self.target = target
self.expr = target.get('expr', '')
self.expr_escaped, self.escaped_names = self.parse_expr(self.expr)
self.required_meters = list(self.escaped_names.values())
self.misconfigured = len(self.required_meters) == 0
if not self.misconfigured:
self.reference_meter = self.required_meters[0]
# convert to set for more efficient contains operation
self.required_meters = set(self.required_meters)
self.cache = collections.defaultdict(dict)
self.latest_timestamp = None
else:
LOG.warn(_('Arithmetic transformer must use at least one'
' meter in expression \'%s\''), self.expr)
def _update_cache(self, _sample):
"""Update the cache with the latest sample."""
escaped_name = self.escaped_names.get(_sample.name, '')
if escaped_name not in self.required_meters:
return
self.cache[_sample.resource_id][escaped_name] = _sample
def _check_requirements(self, resource_id):
"""Check if all the required meters are available in the cache."""
return len(self.cache[resource_id]) == len(self.required_meters)
def _calculate(self, resource_id):
"""Evaluate the expression and return a new sample if successful."""
ns_dict = dict((m, s.as_dict()) for m, s
in six.iteritems(self.cache[resource_id]))
ns = transformer.Namespace(ns_dict)
try:
new_volume = eval(self.expr_escaped, {}, ns)
if math.isnan(new_volume):
raise ArithmeticError(_('Expression evaluated to '
'a NaN value!'))
reference_sample = self.cache[resource_id][self.reference_meter]
return sample.Sample(
name=self.target.get('name', reference_sample.name),
unit=self.target.get('unit', reference_sample.unit),
type=self.target.get('type', reference_sample.type),
volume=float(new_volume),
user_id=reference_sample.user_id,
project_id=reference_sample.project_id,
resource_id=reference_sample.resource_id,
timestamp=self.latest_timestamp,
resource_metadata=reference_sample.resource_metadata
)
except Exception as e:
LOG.warn(_('Unable to evaluate expression %(expr)s: %(exc)s'),
{'expr': self.expr, 'exc': e})
def handle_sample(self, context, _sample):
self._update_cache(_sample)
self.latest_timestamp = _sample.timestamp
def flush(self, context):
new_samples = []
if not self.misconfigured:
for resource_id in self.cache:
if self._check_requirements(resource_id):
new_samples.append(self._calculate(resource_id))
else:
LOG.warn(_('Unable to perform calculation, not all of '
'{%s} are present'),
', '.join(self.required_meters))
self.cache.clear()
return new_samples
@classmethod
def parse_expr(cls, expr):
"""Transforms meter names in the expression into valid identifiers.
:param expr: unescaped expression
:return: A tuple of the escaped expression and a dict representing
the translation of meter names into Python identifiers
"""
class Replacer(object):
"""Replaces matched meter names with escaped names.
If the meter name is not followed by parameter access in the
expression, it defaults to accessing the 'volume' parameter.
"""
def __init__(self, original_expr):
self.original_expr = original_expr
self.escaped_map = {}
def __call__(self, match):
meter_name = match.group(1)
escaped_name = self.escape(meter_name)
self.escaped_map[meter_name] = escaped_name
if (match.end(0) == len(self.original_expr) or
self.original_expr[match.end(0)] != '.'):
escaped_name += '.volume'
return escaped_name
@staticmethod
def escape(name):
has_dot = '.' in name
if has_dot:
name = name.replace('.', '_')
if has_dot or name.endswith('ESC') or name in keyword.kwlist:
name = "_" + name + '_ESC'
return name
replacer = Replacer(expr)
expr = re.sub(cls.meter_name_re, replacer, expr)
return expr, replacer.escaped_map
| {
"content_hash": "e71ea5e6722ab0770178edaea39ccdc1",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 77,
"avg_line_length": 38.892857142857146,
"alnum_prop": 0.5702479338842975,
"repo_name": "r-mibu/ceilometer",
"id": "557ac83dff85485c7496cbaf80b79f509ff62225",
"size": "6020",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "ceilometer/transformer/arithmetic.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "2911624"
},
{
"name": "Shell",
"bytes": "23274"
}
],
"symlink_target": ""
} |
from pyadds.meta import ops
from pyadds import annotate as an
def flatten(its):
return (i for it in its for i in it)
class FloDSL:
def __init__(self, units, sources=None, targets=None, model=None):
self.model = model
self.units = tuple(units)
if sources is None:
sources = [u.out for u in units if hasattr(u, 'out')]
if targets is None:
targets = [u.process for u in units if hasattr(u, 'process')]
self.sources = sources
self.targets = targets
@property
def left(self):
return self.units[0]
@property
def right(self):
        return self.units[-1]
@classmethod
    def join(cls, items):
        return cls(flatten(i.dsl.units for i in items),
                   sources=flatten(i.dsl.sources for i in items),
                   targets=flatten(i.dsl.targets for i in items))
def __add__(self, other):
return type(self)(self.units + other.units,
targets=self.targets,
sources=other.sources)
def __and__(self, other):
self.model.join(self.right, other.left)
return self + other
def __or__(self, other):
self.model.part(self.right, other.left)
return self + other
def __mul__(self, num):
self.model.bundle(self.units, mul=num)
return self
def __pow__(self, num):
self.model.bundle(self.units, repl=num)
return self
def __xor__(self, key):
self.model.bundle(self.units, map=key)
return self
def __rshift__(self, other):
if isinstance(other, tuple):
return self >> self.join(other)
elif isinstance(other, FloDSL):
for src in self.sources:
for tgt in other.targets:
self.model.link(src, tgt)
return self + other
else:
return NotImplemented
def __iter__(self):
done = set()
return iter(done.add(u) or u for u in self.units if u not in done)
def __str__(self):
return '|{}|'.format(','.join(map(str, self)))
def __repr__(self):
ls = []
ls.append('|{}|'.format(','.join(map(repr, self))))
if self.targets:
ls.append('>>{}'.format(self.targets))
if self.sources:
ls.append('>>{}'.format(self.sources))
return ' '.join(ls)
DSLMixin = ops.autowraped_ops(FloDSL, by='dsl')
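# Illustrative usage (assuming units that expose a .dsl attribute):
# `a >> b` links a's sources to b's targets, `a & b` additionally joins
# the adjacent units in the model, and `a | b` parts them instead.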
| {
"content_hash": "549bc760c2d874b122af34477ec523a4",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 74,
"avg_line_length": 28.045454545454547,
"alnum_prop": 0.5457860615883307,
"repo_name": "wabu/zeroflo",
"id": "88a952caa179f137380d0f6dfe4de7ecdfe9fd11",
"size": "2468",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "zeroflo/top/dsl.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "150743"
}
],
"symlink_target": ""
} |
import gdb
# Load the xmethods if GDB supports them.
def gdb_has_xmethods():
try:
import gdb.xmethod
return True
except ImportError:
return False
def register_libstdcxx_printers(obj):
# Load the pretty-printers.
from .printers import register_libstdcxx_printers
register_libstdcxx_printers(obj)
if gdb_has_xmethods():
from .xmethods import register_libstdcxx_xmethods
register_libstdcxx_xmethods(obj)
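# Typical invocation (illustrative) from a gdb auto-load script:
#   register_libstdcxx_printers(gdb.current_objfile())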
| {
"content_hash": "80cd5897795f46a84d32406b892ae0fc",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 57,
"avg_line_length": 26,
"alnum_prop": 0.6944444444444444,
"repo_name": "ChangsoonKim/STM32F7DiscTutor",
"id": "46c9f1c1ad09dfa92b0757a6655626e7de510a6f",
"size": "1161",
"binary": false,
"copies": "27",
"ref": "refs/heads/master",
"path": "toolchain/osx/gcc-arm-none-eabi-6-2017-q1-update/share/gcc-arm-none-eabi/libstdcxx/v6/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "4483749"
},
{
"name": "C",
"bytes": "155831052"
},
{
"name": "C++",
"bytes": "14522753"
},
{
"name": "HTML",
"bytes": "22473152"
},
{
"name": "Logos",
"bytes": "9680"
},
{
"name": "Makefile",
"bytes": "25498"
},
{
"name": "Objective-C",
"bytes": "285838"
},
{
"name": "Python",
"bytes": "288546"
},
{
"name": "Roff",
"bytes": "2842557"
},
{
"name": "Shell",
"bytes": "20768"
},
{
"name": "XC",
"bytes": "9187"
},
{
"name": "XS",
"bytes": "9137"
}
],
"symlink_target": ""
} |
from Memory.Main import *
from Cpu.Main import *
from Disk.Main import *
from Net.Main import *
from Local.Main import *
from Raid.Main import *
from func import *
from urllib2 import Request, urlopen
from urllib import urlencode
from threading import Thread
from json import dumps, loads
from sys import argv, exit
from os import geteuid
from time import sleep
def Jdump(data):
return dumps(data, indent=1)
def Run(Debug=False):
data = {}
threadings = []
TDMList = {
HostName : "NULL",
Memory : "NULL",
Net_Usage : "NULL",
Press : "NULL",
Cpu_Usage : "NULL",
Cpu_Load : "NULL",
Disk_Usage: "NULL",
Disk_IO : "NULL",
}
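    # The keys are the collector functions pulled in by the star imports
    # above (e.g. Memory, Cpu_Usage); each runs in its own thread and is
    # expected to write its result into the shared `data` dict.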
for Keys,Value in TDMList.items():
threadings.append(Thread(target=Keys, args=(data, )))
for t in threadings:
t.start()
for t in threadings:
t.join()
return data
def is_root():
if geteuid() != 0:
print "Please use the super user root"
exit(1)
if __name__ == '__main__':
is_root()
while True:
sleep(1)
result = Run(Debug=False)
print Jdump(result)
| {
"content_hash": "69cd849831273f00699400775967addc",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 55,
"avg_line_length": 19.90909090909091,
"alnum_prop": 0.6264840182648402,
"repo_name": "selboo/Linux-Monitor",
"id": "934780af8b13979f81a294b86979d5679ee248a6",
"size": "1131",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "main.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "15"
},
{
"name": "Python",
"bytes": "20774"
}
],
"symlink_target": ""
} |
import javabridge
def test_java_bridge():
javabridge.start_vm(run_headless=True)
try:
print(javabridge.run_script('java.lang.String.format("Hello, %s!", name);', dict(name='world')))
finally:
javabridge.kill_vm() | {
"content_hash": "37e3e48f0b5b830c6a16fb4c99ab7bf1",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 104,
"avg_line_length": 26.88888888888889,
"alnum_prop": 0.6528925619834711,
"repo_name": "barancev/testautomation_training",
"id": "61fa6186981c25401939c429daccc0c7b8d8f514",
"size": "242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testautomation_py/lesson2/module1/lesson2_module1_sample1.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C#",
"bytes": "3109"
},
{
"name": "Cucumber",
"bytes": "276"
},
{
"name": "Java",
"bytes": "8234"
},
{
"name": "Python",
"bytes": "15642"
},
{
"name": "RobotFramework",
"bytes": "420"
}
],
"symlink_target": ""
} |
"""Test for tcrm.utils.system_testing_utils."""
import datetime
import os
import shutil
import subprocess
import time
import unittest
import unittest.mock
from plugins.pipeline_plugins.utils import system_testing_utils
def _create_temp_file(file_path, content):
file_dir = os.path.dirname(file_path)
if not os.path.exists(file_dir):
os.mkdir(file_dir)
with open(file_path, 'w') as f:
f.write(content)
class SystemVerificationUtilsTest(unittest.TestCase):
def test_run_shell_cmd_timed_out(self):
with self.assertRaises(subprocess.TimeoutExpired):
system_testing_utils.run_shell_cmd('sleep 2', timeout=1)
def test_run_shell_exit_code_is_not_zero(self):
with self.assertRaises(subprocess.CalledProcessError):
system_testing_utils.run_shell_cmd('exit 1')
def test_get_iso8601_date_str(self):
now = datetime.datetime.now()
ret = system_testing_utils.get_iso8601_date_str(now)
self.assertEqual(
ret,
f'{now.year}-{"{:02d}".format(now.month)}-'
f'{"{:02d}".format(now.day)}T00:00:00+00:00')
def test_get_airflow_home(self):
with unittest.mock.patch.dict('os.environ',
{'AIRFLOW_HOME': '/airflow_home'}):
ret = system_testing_utils.get_airflow_home()
self.assertEqual(ret, '/airflow_home')
def test_create_or_update_airflow_gcp_connection(self):
with unittest.mock.patch(
'plugins.pipeline_plugins.utils.'
'system_testing_utils.run_shell_cmd') as p:
system_testing_utils.create_or_update_airflow_gcp_connection(
'conn_id', 'project_id')
self.assertEqual(p.call_count, 2)
def test_create_or_update_airflow_variable(self):
with unittest.mock.patch(
'plugins.pipeline_plugins.utils.'
'system_testing_utils.run_shell_cmd') as p:
system_testing_utils.create_or_update_airflow_variable(
'key', 'value')
p.assert_called_once()
def test_run_airflow_task(self):
with unittest.mock.patch(
'plugins.pipeline_plugins.utils.'
'system_testing_utils.run_shell_cmd') as p:
system_testing_utils.run_airflow_task('dag_id', 'task_id',
'2020-10-13T00:00:00+00:00')
p.assert_called_once()
def test_get_latest_task_log(self):
airflow_home = '/tmp'
with unittest.mock.patch.dict('os.environ', {'AIRFLOW_HOME': airflow_home}):
dag_id = 'tcrm_bq_to_ga'
task_id = 'bq_to_ga_task'
execution_date = '2020-10-13T00:00:00+00:00'
temp_logs_dir = (f'{airflow_home}/logs/{dag_id}/'
f'{task_id}/{execution_date}')
os.makedirs(temp_logs_dir)
_create_temp_file(os.path.join(temp_logs_dir, '1.log'), 'test1')
time.sleep(0.1)
_create_temp_file(os.path.join(temp_logs_dir, '2.log'), 'test2')
log_content = system_testing_utils.get_latest_task_log(
dag_id, task_id, execution_date)
self.assertEqual(log_content, 'test2')
shutil.rmtree(f'{airflow_home}/logs')
@unittest.mock.patch('google.cloud.bigquery.Client')
def test_insert_rows_to_table(self, mock_client_class):
mock_client = unittest.mock.MagicMock()
mock_client_class.return_value = mock_client
mock_insert = unittest.mock.MagicMock()
mock_client.insert_rows_json = mock_insert
mock_insert.return_value = None
system_testing_utils.insert_rows_to_table([], 'table_id')
mock_insert.assert_called_with('table_id', [])
@unittest.mock.patch('google.cloud.bigquery.Client')
def test_insert_rows_to_table_got_error(self, mock_client_class):
mock_client = unittest.mock.MagicMock()
mock_client_class.return_value = mock_client
mock_insert = unittest.mock.MagicMock()
mock_client.insert_rows_json = mock_insert
mock_insert.return_value = 'error'
with self.assertRaises(RuntimeError):
system_testing_utils.insert_rows_to_table([], 'table_id')
@unittest.mock.patch('google.cloud.secretmanager.SecretManagerServiceClient')
def test_get_payload_from_secret_manager(self, mock_client_class):
path_to_secret = 'path/to/secret'
secret_payload = b'test'
project_id = 'project_id'
secret_name = 'secret_name'
secret_ver = 'secret_ver'
mock_client_class.return_value.secret_version_path.return_value = path_to_secret
mock_client_class.return_value.access_secret_version.return_value.payload.data = secret_payload
actual = system_testing_utils.get_payload_from_secret_manager(
project_id, secret_name, secret_ver)
mock_client_class.return_value.secret_version_path.assert_called_with(
project_id, secret_name, secret_ver)
mock_client_class.return_value.access_secret_version.assert_called_with(
name=path_to_secret)
self.assertEqual(actual, secret_payload.decode('UTF-8'))
if __name__ == '__main__':
unittest.main()
| {
"content_hash": "abe210869d76e13618aeb3d8b507d78d",
"timestamp": "",
"source": "github",
"line_count": 133,
"max_line_length": 99,
"avg_line_length": 36.65413533834587,
"alnum_prop": 0.6703589743589744,
"repo_name": "google/TaglessCRM",
"id": "bf9a3ecd0a8def26638838cdce70baa2edec9689",
"size": "5466",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/test/utils/system_testing_utils_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1309"
},
{
"name": "Python",
"bytes": "452125"
},
{
"name": "Shell",
"bytes": "29340"
}
],
"symlink_target": ""
} |
import urllib2
from base64 import b64decode
import links
# Obfuscated to avoid spoilers
url = b64decode(links.link_02)
# Message is huge, so using urllib2 instead of copy/paste
response = urllib2.urlopen(url)
html = response.read()
# Quick extraction of message from html
msg = html.split('--')[-2].strip('\n')
base_url = 'http://www.pythonchallenge.com/pc/def/{}.html'
decoded = ''.join([c for c in msg if c.isalpha()])
print 'The secret message is', decoded
print base_url.format(decoded) | {
"content_hash": "7c14c927db3ddac11259bb3cc9cf918d",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 58,
"avg_line_length": 26.05263157894737,
"alnum_prop": 0.7292929292929293,
"repo_name": "joeeoj/python-challenge-practice",
"id": "f79294443938766dde9b776598ad6588c538a727",
"size": "541",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "challenge_02.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2493"
}
],
"symlink_target": ""
} |
import os
import sys
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.txt')).read()
requires = ['pyramid',
'SQLAlchemy>=0.8.0,<1.1.0',
'pyramid_tm',
'pyramid_multiauth',
'pyramid_macauth',
'pyramid_mako',
'zope.sqlalchemy',
'colander',
'py-bcrypt',
'requests',
# wsgi server dependencies
'waitress',
'Paste',
# Test dependencies
'WebTest',
'mock',
# Database migrations
'alembic',
]
if sys.version_info[:3] < (2, 5, 0):
requires.append('pysqlite')
setup(name='MAGMaWeb',
version='1.3',
description='MAGMaWeb',
long_description=README + '\n\n' + CHANGES,
classifiers=["Programming Language :: Python",
"Programming Language :: JavaScript",
"Intended Audience :: Science/Research",
"Environment :: Web Environment",
"Natural Language :: English",
"Framework :: Pyramid",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"Topic :: Scientific/Engineering :: Chemistry",
],
author='Stefan Verhoeven',
author_email='[email protected]',
url='http://www.emetabolomics.org',
keywords='web wsgi bfg pylons pyramid cheminformatics',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
test_suite='magmaweb.tests',
install_requires=requires,
entry_points={
'paste.app_factory': [
'main = magmaweb:main',
],
'console_scripts': [
'magma-web = magmaweb.script:main',
],
},
paster_plugins=['pyramid'],
)
| {
"content_hash": "28e7ceb18c0ca99995196a45a01b49dc",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 74,
"avg_line_length": 31.83076923076923,
"alnum_prop": 0.5195746737554374,
"repo_name": "NLeSC/MAGMa",
"id": "46f29e11d975dea98c210d77ceba2075108551ad",
"size": "2069",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/setup.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "1635"
},
{
"name": "Dockerfile",
"bytes": "658"
},
{
"name": "HTML",
"bytes": "28717"
},
{
"name": "JavaScript",
"bytes": "293482"
},
{
"name": "Makefile",
"bytes": "59741"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "576410"
}
],
"symlink_target": ""
} |
"""Client-side fork interop tests as a unit test."""
import subprocess
import sys
import tempfile
import threading
import unittest
from grpc._cython import cygrpc
from tests.fork import methods
# New instance of multiprocessing.Process using fork without exec can and will
# freeze if the Python process has any other threads running. This includes the
# additional thread spawned by our _runner.py class. So in order to test our
# compatibility with multiprocessing, we first fork+exec a new process to ensure
# we don't have any conflicting background threads.
_CLIENT_FORK_SCRIPT_TEMPLATE = """if True:
import os
import sys
from grpc._cython import cygrpc
from tests.fork import methods
cygrpc._GRPC_ENABLE_FORK_SUPPORT = True
os.environ['GRPC_POLL_STRATEGY'] = 'epoll1'
methods.TestCase.%s.run_test({
'server_host': 'localhost',
'server_port': %d,
'use_tls': False
})
"""
_SUBPROCESS_TIMEOUT_S = 30
@unittest.skipUnless(
sys.platform.startswith("linux"),
"not supported on windows, and fork+exec networking blocked on mac")
class ForkInteropTest(unittest.TestCase):
def setUp(self):
start_server_script = """if True:
import sys
import time
import grpc
from src.proto.grpc.testing import test_pb2_grpc
from tests.interop import service as interop_service
from tests.unit import test_common
server = test_common.test_server()
test_pb2_grpc.add_TestServiceServicer_to_server(
interop_service.TestService(), server)
port = server.add_insecure_port('[::]:0')
server.start()
print(port)
sys.stdout.flush()
while True:
time.sleep(1)
"""
streams = tuple(tempfile.TemporaryFile() for _ in range(2))
self._server_process = subprocess.Popen(
[sys.executable, '-c', start_server_script],
stdout=streams[0],
stderr=streams[1])
timer = threading.Timer(_SUBPROCESS_TIMEOUT_S,
self._server_process.kill)
try:
timer.start()
while True:
streams[0].seek(0)
s = streams[0].readline()
if not s:
continue
else:
self._port = int(s)
break
except ValueError:
raise Exception('Failed to get port from server')
finally:
timer.cancel()
def testConnectivityWatch(self):
self._verifyTestCase(methods.TestCase.CONNECTIVITY_WATCH)
def testCloseChannelBeforeFork(self):
self._verifyTestCase(methods.TestCase.CLOSE_CHANNEL_BEFORE_FORK)
def testAsyncUnarySameChannel(self):
self._verifyTestCase(methods.TestCase.ASYNC_UNARY_SAME_CHANNEL)
def testAsyncUnaryNewChannel(self):
self._verifyTestCase(methods.TestCase.ASYNC_UNARY_NEW_CHANNEL)
def testBlockingUnarySameChannel(self):
self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_SAME_CHANNEL)
def testBlockingUnaryNewChannel(self):
self._verifyTestCase(methods.TestCase.BLOCKING_UNARY_NEW_CHANNEL)
def testInProgressBidiContinueCall(self):
self._verifyTestCase(methods.TestCase.IN_PROGRESS_BIDI_CONTINUE_CALL)
def testInProgressBidiSameChannelAsyncCall(self):
self._verifyTestCase(
methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_ASYNC_CALL)
def testInProgressBidiSameChannelBlockingCall(self):
self._verifyTestCase(
methods.TestCase.IN_PROGRESS_BIDI_SAME_CHANNEL_BLOCKING_CALL)
def testInProgressBidiNewChannelAsyncCall(self):
self._verifyTestCase(
methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_ASYNC_CALL)
def testInProgressBidiNewChannelBlockingCall(self):
self._verifyTestCase(
methods.TestCase.IN_PROGRESS_BIDI_NEW_CHANNEL_BLOCKING_CALL)
def tearDown(self):
self._server_process.kill()
def _verifyTestCase(self, test_case):
script = _CLIENT_FORK_SCRIPT_TEMPLATE % (test_case.name, self._port)
streams = tuple(tempfile.TemporaryFile() for _ in range(2))
process = subprocess.Popen([sys.executable, '-c', script],
stdout=streams[0],
stderr=streams[1])
timer = threading.Timer(_SUBPROCESS_TIMEOUT_S, process.kill)
timer.start()
process.wait()
timer.cancel()
outputs = []
for stream in streams:
stream.seek(0)
outputs.append(stream.read())
self.assertEqual(
0, process.returncode,
'process failed with exit code %d (stdout: "%s", stderr: "%s")' %
(process.returncode, outputs[0], outputs[1]))
if __name__ == '__main__':
unittest.main(verbosity=2)
| {
"content_hash": "745888d2c9b3a24a6ea2e32040eb1eaa",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 80,
"avg_line_length": 34.65734265734266,
"alnum_prop": 0.6285310734463276,
"repo_name": "grpc/grpc",
"id": "1ae2265f1836fb1c72395e9ac84b94270c34ac53",
"size": "5533",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "src/python/grpcio_tests/tests/fork/_fork_interop_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Awk",
"bytes": "5444"
},
{
"name": "Batchfile",
"bytes": "37697"
},
{
"name": "C",
"bytes": "1347288"
},
{
"name": "C#",
"bytes": "113402"
},
{
"name": "C++",
"bytes": "17619422"
},
{
"name": "CMake",
"bytes": "29409"
},
{
"name": "CSS",
"bytes": "1519"
},
{
"name": "Cython",
"bytes": "258997"
},
{
"name": "Dockerfile",
"bytes": "183538"
},
{
"name": "Go",
"bytes": "34794"
},
{
"name": "HTML",
"bytes": "14"
},
{
"name": "Java",
"bytes": "14329"
},
{
"name": "JavaScript",
"bytes": "5572"
},
{
"name": "Objective-C",
"bytes": "724869"
},
{
"name": "Objective-C++",
"bytes": "79586"
},
{
"name": "PHP",
"bytes": "488004"
},
{
"name": "PowerShell",
"bytes": "5008"
},
{
"name": "Python",
"bytes": "3830209"
},
{
"name": "Ruby",
"bytes": "649802"
},
{
"name": "Shell",
"bytes": "779092"
},
{
"name": "Starlark",
"bytes": "881760"
},
{
"name": "Swift",
"bytes": "7487"
},
{
"name": "XSLT",
"bytes": "9846"
}
],
"symlink_target": ""
} |
from __future__ import unicode_literals
from __future__ import absolute_import
import datetime
import json
import logging
import os
import random
import re
import sys
import time
import Queue
import threading
import shelve
import uuid
import urllib2
from geopy.geocoders import GoogleV3
from pgoapi import PGoApi
from pgoapi.utilities import f2i, get_cell_ids
from s2sphere import Cell, CellId, LatLng
from . import cell_workers
from .base_task import BaseTask
from .plugin_loader import PluginLoader
from .api_wrapper import ApiWrapper
from .cell_workers.utils import distance
from .event_manager import EventManager
from .human_behaviour import sleep
from .item_list import Item
from .metrics import Metrics
from .sleep_schedule import SleepSchedule
from pokemongo_bot.event_handlers import SocketIoHandler, LoggingHandler, SocialHandler
from pokemongo_bot.socketio_server.runner import SocketIoRunner
from pokemongo_bot.websocket_remote_control import WebsocketRemoteControl
from pokemongo_bot.base_dir import _base_dir
from .worker_result import WorkerResult
from .tree_config_builder import ConfigException
from .tree_config_builder import MismatchTaskApiVersion
from .tree_config_builder import TreeConfigBuilder
from .inventory import init_inventory, player
from sys import platform as _platform
from pgoapi.protos.pogoprotos.enums import badge_type_pb2
from pgoapi.exceptions import AuthException, NotLoggedInException, ServerSideRequestThrottlingException, ServerBusyOrOfflineException, NoPlayerPositionSetException, HashingOfflineException
from pgoapi.hash_server import HashServer
class FileIOException(Exception):
pass
class PokemonGoBot(object):
@property
def position(self):
return self.api.actual_lat, self.api.actual_lng, self.api.actual_alt
@property
def noised_position(self):
return self.api.noised_lat, self.api.noised_lng, self.api.noised_alt
#@position.setter # these should be called through api now that gps replication is there...
#def position(self, position_tuple):
# self.api._position_lat, self.api._position_lng, self.api._position_alt = position_tuple
@property
def player_data(self):
"""
Returns the player data as received from the API.
:return: The player data.
:rtype: dict
"""
return self._player
@property
def stardust(self):
dust = filter(lambda y: y['name'] == 'STARDUST', self._player['currencies'])[0]
if 'amount' in dust:
return dust['amount']
else:
return 0
@stardust.setter
def stardust(self, value):
dust = filter(lambda y: y['name'] == 'STARDUST', self._player['currencies'])[0]
if 'amount' in dust:
dust['amount'] = value
def __init__(self, db, config):
self.database = db
self.config = config
super(PokemonGoBot, self).__init__()
self.fort_timeouts = dict()
self.pokemon_list = json.load(
open(os.path.join(_base_dir, 'data', 'pokemon.json'))
)
self.item_list = json.load(open(os.path.join(_base_dir, 'data', 'items.json')))
# @var Metrics
self.metrics = Metrics(self)
self.latest_inventory = None
self.cell = None
self.recent_forts = [None] * config.forts_max_circle_size
self.tick_count = 0
self.softban = False
self.wake_location = None
self.start_position = None
self.last_map_object = None
self.last_time_map_object = 0
self.logger = logging.getLogger(type(self).__name__)
self.alt = self.config.gps_default_altitude
# Make our own copy of the workers for this instance
self.workers = []
# Theading setup for file writing
self.web_update_queue = Queue.Queue(maxsize=1)
self.web_update_thread = threading.Thread(target=self.update_web_location_worker)
self.web_update_thread.start()
# Heartbeat limiting
self.heartbeat_threshold = self.config.heartbeat_threshold
self.heartbeat_counter = 0
self.last_heartbeat = time.time()
self.hb_locked = False # lock hb on snip
# Inventory refresh limiting
self.inventory_refresh_threshold = 10
self.inventory_refresh_counter = 0
self.last_inventory_refresh = time.time()
# Catch on/off
self.catch_disabled = False
self.capture_locked = False # lock catching while moving to VIP pokemon
client_id_file_path = os.path.join(_base_dir, 'data', 'mqtt_client_id')
saved_info = shelve.open(client_id_file_path)
key = 'client_id'.encode('utf-8')
if key in saved_info:
self.config.client_id = saved_info[key]
else:
self.config.client_id = str(uuid.uuid4())
saved_info[key] = self.config.client_id
saved_info.close()
def start(self):
self._setup_event_system()
self.sleep_schedule = SleepSchedule(self, self.config.sleep_schedule) if self.config.sleep_schedule else None
if self.sleep_schedule:
self.sleep_schedule.work()
self._setup_api()
self._load_recent_forts()
init_inventory(self)
self.display_player_info()
self._print_character_info()
if self.config.pokemon_bag_show_at_start and self.config.pokemon_bag_pokemon_info:
self._print_list_pokemon()
random.seed()
def _setup_event_system(self):
handlers = []
color = self.config.logging and 'color' in self.config.logging and self.config.logging['color']
debug = self.config.debug
handlers.append(LoggingHandler(color, debug))
handlers.append(SocialHandler(self))
if self.config.websocket_server_url:
if self.config.websocket_start_embedded_server:
self.sio_runner = SocketIoRunner(self.config.websocket_server_url)
self.sio_runner.start_listening_async()
websocket_handler = SocketIoHandler(
self,
self.config.websocket_server_url
)
handlers.append(websocket_handler)
if self.config.websocket_remote_control:
remote_control = WebsocketRemoteControl(self).start()
# @var EventManager
self.event_manager = EventManager(self.config.walker_limit_output, *handlers)
self._register_events()
if self.config.show_events:
self.event_manager.event_report()
sys.exit(1)
# Registering an event:
# self.event_manager.register_event("location", parameters=['lat', 'lng'])
#
# Emitting the event should be enough to add logging and send a websocket
# message:
# self.event_manager.emit('location', level='info', data={'lat': 1, 'lng': 1})
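# A fuller (illustrative) emit, matching how events are raised elsewhere in
# this class; 'location' is a hypothetical event name:
# self.event_manager.emit(
# 'location',
# sender=self,
# level='info',
# formatted='Now at ({lat}, {lng})',
# data={'lat': 1, 'lng': 1}
# )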
def _register_events(self):
self.event_manager.register_event(
'location_found',
parameters=('position', 'location')
)
self.event_manager.register_event('api_error')
self.event_manager.register_event('config_error')
self.event_manager.register_event('captcha')
self.event_manager.register_event('login_started')
self.event_manager.register_event('login_failed')
self.event_manager.register_event('login_successful')
self.event_manager.register_event('niantic_warning')
self.event_manager.register_event('set_start_location')
self.event_manager.register_event('load_cached_location')
self.event_manager.register_event('location_cache_ignored')
self.event_manager.register_event('debug')
self.event_manager.register_event('refuse_to_sit')
self.event_manager.register_event('reset_destination')
self.event_manager.register_event('new_destination')
self.event_manager.register_event('moving_to_destination')
self.event_manager.register_event('arrived_at_destination')
self.event_manager.register_event('staying_at_destination')
self.event_manager.register_event('buddy_pokemon', parameters=('pokemon', 'iv', 'cp'))
self.event_manager.register_event('buddy_reward', parameters=('pokemon', 'family', 'candy_earned', 'candy'))
self.event_manager.register_event('buddy_walked', parameters=('pokemon', 'distance_walked', 'distance_needed'))
# ignore candy above threshold
self.event_manager.register_event(
'ignore_candy_above_thresold',
parameters=(
'name',
'amount',
'threshold'
)
)
self.event_manager.register_event('followpath_output_disabled')
self.event_manager.register_event(
'position_update',
parameters=(
'current_position',
'last_position',
'distance', # optional
'distance_unit' # optional
)
)
self.event_manager.register_event(
'path_lap_update',
parameters=(
'number_lap',
'number_lap_max'
)
)
self.event_manager.register_event(
'path_lap_end',
parameters=(
'duration',
'resume'
)
)
self.event_manager.register_event('location_cache_error')
self.event_manager.register_event('security_check')
self.event_manager.register_event('bot_start')
self.event_manager.register_event('bot_exit')
self.event_manager.register_event('bot_interrupted')
# sleep stuff
self.event_manager.register_event(
'next_sleep',
parameters=(
'time',
'duration'
)
)
self.event_manager.register_event(
'bot_sleep',
parameters=(
'time_hms',
'wake'
)
)
# random pause
self.event_manager.register_event(
'next_random_pause',
parameters=(
'time',
'duration'
)
)
self.event_manager.register_event(
'bot_random_pause',
parameters=(
'time_hms',
'resume'
)
)
# recycle stuff
self.event_manager.register_event(
'next_force_recycle',
parameters=(
'time',
)
)
self.event_manager.register_event('force_recycle')
# random alive pause
self.event_manager.register_event(
'next_random_alive_pause',
parameters=(
'time',
'duration'
)
)
self.event_manager.register_event(
'bot_random_alive_pause',
parameters=(
'time_hms',
'resume'
)
)
# fort stuff
self.event_manager.register_event(
'spun_fort',
parameters=(
'fort_id',
'latitude',
'longitude'
)
)
self.event_manager.register_event(
'lured_pokemon_found',
parameters=(
'fort_id',
'fort_name',
'encounter_id',
'latitude',
'longitude'
)
)
self.event_manager.register_event(
'moving_to_fort',
parameters=(
'fort_name',
'distance'
)
)
self.event_manager.register_event(
'moving_to_lured_fort',
parameters=(
'fort_name',
'distance',
'lure_distance'
)
)
self.event_manager.register_event(
'spun_pokestop',
parameters=(
'pokestop', 'exp', 'items'
)
)
self.event_manager.register_event(
'pokestop_empty',
parameters=('pokestop',)
)
self.event_manager.register_event(
'pokestop_out_of_range',
parameters=('pokestop',)
)
self.event_manager.register_event(
'pokestop_on_cooldown',
parameters=('pokestop', 'minutes_left')
)
self.event_manager.register_event(
'unknown_spin_result',
parameters=('status_code',)
)
self.event_manager.register_event('pokestop_searching_too_often')
self.event_manager.register_event('arrived_at_fort')
# pokemon stuff
self.event_manager.register_event(
'catchable_pokemon',
parameters=(
'pokemon_id',
'spawn_point_id',
'encounter_id',
'latitude',
'longitude',
'expiration_timestamp_ms',
'pokemon_name'
)
)
self.event_manager.register_event(
'incensed_pokemon_found',
parameters=(
'pokemon_id',
'encounter_id',
'encounter_location',
'latitude',
'longitude'
)
)
self.event_manager.register_event(
'pokemon_appeared',
parameters=(
'pokemon',
'ncp',
'cp',
'iv',
'iv_display',
'encounter_id',
'latitude',
'longitude',
'pokemon_id',
'shiny'
)
)
self.event_manager.register_event('no_pokeballs')
self.event_manager.register_event('enough_ultraballs')
self.event_manager.register_event(
'pokemon_catch_rate',
parameters=(
'catch_rate',
'ball_name',
'berry_name',
'berry_count'
)
)
self.event_manager.register_event(
'threw_berry',
parameters=(
'berry_name',
'ball_name',
'new_catch_rate'
)
)
self.event_manager.register_event(
'threw_pokeball',
parameters=(
'throw_type',
'spin_label',
'ball_name',
'success_percentage',
'count_left'
)
)
self.event_manager.register_event(
'pokemon_capture_failed',
parameters=('pokemon',)
)
self.event_manager.register_event(
'pokemon_vanished',
parameters=(
'pokemon',
'encounter_id',
'latitude',
'longitude',
'pokemon_id'
)
)
self.event_manager.register_event(
'vanish_limit_reached',
parameters=(
'duration',
'resume'
)
)
self.event_manager.register_event('pokemon_not_in_range')
self.event_manager.register_event('pokemon_inventory_full')
self.event_manager.register_event(
'pokemon_caught',
parameters=(
'pokemon',
'ncp', 'cp', 'iv', 'iv_display', 'exp',
'shiny',
'stardust',
'encounter_id',
'latitude',
'longitude',
'pokemon_id',
'daily_catch_limit',
'caught_last_24_hour',
)
)
self.event_manager.register_event(
'pokemon_vip_caught',
parameters=(
'pokemon',
'ncp', 'cp', 'iv', 'iv_display', 'exp',
'shiny',
'stardust',
'encounter_id',
'latitude',
'longitude',
'pokemon_id',
'daily_catch_limit',
'caught_last_24_hour',
)
)
self.event_manager.register_event(
'pokemon_evolved',
parameters=('pokemon', 'iv', 'old_cp', 'cp', 'candy', 'xp')
)
self.event_manager.register_event(
'pokemon_favored',
parameters=('pokemon', 'iv', 'cp')
)
self.event_manager.register_event(
'pokemon_unfavored',
parameters=('pokemon', 'iv', 'cp')
)
self.event_manager.register_event(
'pokemon_evolve_check',
parameters=('has', 'needs')
)
self.event_manager.register_event(
'pokemon_upgraded',
parameters=('pokemon', 'iv', 'cp', 'new_cp', 'candy', 'stardust')
)
self.event_manager.register_event('skip_evolve')
self.event_manager.register_event('threw_berry_failed', parameters=('status_code',))
self.event_manager.register_event('vip_pokemon')
self.event_manager.register_event('gained_candy', parameters=('gained_candy', 'quantity', 'type'))
self.event_manager.register_event('catch_limit')
self.event_manager.register_event('spin_limit')
self.event_manager.register_event('show_best_pokemon', parameters=('pokemons',))
# level up stuff
self.event_manager.register_event(
'level_up',
parameters=(
'previous_level',
'current_level'
)
)
self.event_manager.register_event(
'level_up_reward',
parameters=('items',)
)
# lucky egg
self.event_manager.register_event(
'used_lucky_egg',
parameters=('amount_left',)
)
self.event_manager.register_event('lucky_egg_error')
# softban
self.event_manager.register_event('softban')
self.event_manager.register_event('softban_fix')
self.event_manager.register_event('softban_fix_done')
# egg incubating
self.event_manager.register_event(
'incubate_try',
parameters=(
'incubator_id',
'egg_id'
)
)
self.event_manager.register_event(
'incubate',
parameters=('distance_in_km',)
)
self.event_manager.register_event(
'next_egg_incubates',
parameters=('eggs_left', 'eggs_inc', 'eggs')
)
self.event_manager.register_event('incubator_already_used')
self.event_manager.register_event('egg_already_incubating')
self.event_manager.register_event(
'egg_hatched',
parameters=(
'name', 'cp', 'ncp', 'iv_ads', 'iv_pct', 'exp', 'stardust', 'candy'
)
)
self.event_manager.register_event('egg_hatched_fail')
# discard item
self.event_manager.register_event(
'item_discarded',
parameters=(
'amount', 'item', 'maximum'
)
)
self.event_manager.register_event(
'item_discard_skipped',
parameters=('space',)
)
self.event_manager.register_event(
'item_discard_fail',
parameters=('item',)
)
# inventory
self.event_manager.register_event('inventory_full')
# release
self.event_manager.register_event(
'keep_best_release',
parameters=(
'amount', 'pokemon', 'criteria'
)
)
self.event_manager.register_event(
'future_pokemon_release',
parameters=(
'pokemon', 'cp', 'iv', 'ivcp', 'below_iv', 'below_cp', 'below_ivcp', 'cp_iv_logic'
)
)
self.event_manager.register_event(
'pokemon_release',
parameters=('pokemon', 'iv', 'cp', 'ivcp', 'candy', 'candy_type')
)
self.event_manager.register_event(
'pokemon_keep',
parameters=('pokemon', 'iv', 'cp', 'ivcp')
)
# polyline walker
self.event_manager.register_event(
'polyline_request',
parameters=('url',)
)
# cluster
self.event_manager.register_event(
'found_cluster',
parameters=(
'num_points', 'forts', 'radius', 'distance'
)
)
self.event_manager.register_event(
'arrived_at_cluster',
parameters=(
'num_points', 'forts', 'radius'
)
)
# rename
self.event_manager.register_event(
'rename_pokemon',
parameters=('old_name', 'current_name',)
)
self.event_manager.register_event(
'pokemon_nickname_invalid',
parameters=('nickname',)
)
self.event_manager.register_event(
'unset_pokemon_nickname',
parameters=('old_name',)
)
# Move To map pokemon
self.event_manager.register_event(
'move_to_map_pokemon_fail',
parameters=('message',)
)
self.event_manager.register_event(
'move_to_map_pokemon_updated_map',
parameters=('lat', 'lon')
)
self.event_manager.register_event(
'move_to_map_pokemon_teleport_to',
parameters=('poke_name', 'poke_dist', 'poke_lat', 'poke_lon',
'disappears_in')
)
self.event_manager.register_event(
'move_to_map_pokemon_encounter',
parameters=('poke_name', 'poke_dist', 'poke_lat', 'poke_lon',
'disappears_in')
)
self.event_manager.register_event(
'move_to_map_pokemon_move_towards',
parameters=('poke_name', 'poke_dist', 'poke_lat', 'poke_lon',
'disappears_in')
)
self.event_manager.register_event(
'move_to_map_pokemon_teleport_back',
parameters=('last_lat', 'last_lon')
)
self.event_manager.register_event(
'moving_to_pokemon_throught_fort',
parameters=('fort_name', 'distance', 'poke_name', 'poke_dist')
)
self.event_manager.register_event(
'move_to_map_pokemon',
parameters=('message',)
)
# cached recent_forts
self.event_manager.register_event('loaded_cached_forts')
self.event_manager.register_event('cached_fort')
self.event_manager.register_event(
'no_cached_forts',
parameters=('path', )
)
self.event_manager.register_event(
'error_caching_forts',
parameters=('path', )
)
# database shit
self.event_manager.register_event('catch_log')
self.event_manager.register_event('vanish_log')
self.event_manager.register_event('evolve_log')
self.event_manager.register_event('login_log')
self.event_manager.register_event('transfer_log')
self.event_manager.register_event('pokestop_log')
self.event_manager.register_event('softban_log')
self.event_manager.register_event('eggs_hatched_log')
self.event_manager.register_event(
'badges',
parameters=('badge', 'level')
)
self.event_manager.register_event(
'player_data',
parameters=('player_data', )
)
self.event_manager.register_event(
'forts_found',
parameters=('json',)
)
# UseIncense
self.event_manager.register_event(
'use_incense',
parameters=('type', 'incense_count')
)
# BuddyPokemon
self.event_manager.register_event(
'buddy_update',
parameters=('name',)
)
self.event_manager.register_event(
'buddy_update_fail',
parameters=('name', 'error')
)
self.event_manager.register_event(
'buddy_candy_earned',
parameters=('candy', 'family', 'quantity', 'candy_earned', 'candy_limit')
)
self.event_manager.register_event('buddy_candy_fail')
self.event_manager.register_event(
'buddy_next_reward',
parameters=('name', 'km_walked', 'km_total')
)
self.event_manager.register_event('buddy_keep_active')
self.event_manager.register_event(
'buddy_not_available',
parameters=('name',)
)
# Sniper
self.event_manager.register_event('sniper_log', parameters=('message',))
self.event_manager.register_event('sniper_error', parameters=('message',))
self.event_manager.register_event('sniper_teleporting', parameters=('latitude', 'longitude', 'name'))
# Catch-limiter
self.event_manager.register_event('catch_limit_on')
self.event_manager.register_event('catch_limit_off')
def tick(self):
self.health_record.heartbeat()
self.cell = self.get_meta_cell()
if self.sleep_schedule:
self.sleep_schedule.work()
now = time.time() * 1000
for fort in self.cell["forts"]:
timeout = fort.get("cooldown_complete_timestamp_ms", 0)
if timeout >= now:
self.fort_timeouts[fort["id"]] = timeout
self._refresh_inventory()
self.tick_count += 1
# Check if session token has expired
self.check_session(self.position)
for worker in self.workers:
if worker.work() == WorkerResult.RUNNING:
return
def get_meta_cell(self):
location = self.position[0:2]
cells = self.find_close_cells(*location)
# Combine all cells into a single dict of the items we care about.
forts = []
wild_pokemons = []
catchable_pokemons = []
nearby_pokemons = []
for cell in cells:
if "forts" in cell and len(cell["forts"]):
forts += cell["forts"]
if "wild_pokemons" in cell and len(cell["wild_pokemons"]):
wild_pokemons += cell["wild_pokemons"]
if "catchable_pokemons" in cell and len(cell["catchable_pokemons"]):
catchable_pokemons += cell["catchable_pokemons"]
if "nearby_pokemons" in cell and len(cell["nearby_pokemons"]):
latlng = LatLng.from_point(Cell(CellId(cell["s2_cell_id"])).get_center())
for p in cell["nearby_pokemons"]:
p["latitude"] = latlng.lat().degrees
p["longitude"] = latlng.lng().degrees
p["s2_cell_id"] = cell["s2_cell_id"]
nearby_pokemons += cell["nearby_pokemons"]
# If there are forts present in the cells sent from the server or we don't yet have any cell data, return all data retrieved
if len(forts) > 1 or not self.cell:
return {
"forts": forts,
"wild_pokemons": wild_pokemons,
"catchable_pokemons": catchable_pokemons,
"nearby_pokemons": nearby_pokemons
}
# If there are no forts present in the data from the server, keep our existing fort data and only update the pokemon cells.
else:
return {
"forts": self.cell["forts"],
"wild_pokemons": wild_pokemons,
"catchable_pokemons": catchable_pokemons,
"nearby_pokemons": nearby_pokemons
}
def update_web_location(self, cells=[], lat=None, lng=None, alt=None):
# we can call the function with no arguments and still get the position
# and map_cells
if lat is None:
lat = self.api._position_lat
if lng is None:
lng = self.api._position_lng
if alt is None:
alt = self.api._position_alt
# dont cache when teleport_to
if self.api.teleporting:
return
if cells == []:
location = self.position[0:2]
cells = self.find_close_cells(*location)
user_data_cells = os.path.join(_base_dir, 'data', 'cells-%s.json' % self.config.username)
try:
with open(user_data_cells, 'w') as outfile:
json.dump(cells, outfile)
except IOError as e:
self.logger.info('[x] Error while opening location file: %s' % e)
user_web_location = os.path.join(
_base_dir, 'web', 'location-%s.json' % self.config.username
)
# alt is unused atm but makes using *location easier
try:
with open(user_web_location, 'w') as outfile:
json.dump({
'lat': lat,
'lng': lng,
'alt': alt,
'cells': cells
}, outfile)
except IOError as e:
self.logger.info('[x] Error while opening location file: %s' % e)
user_data_lastlocation = os.path.join(
_base_dir, 'data', 'last-location-%s.json' % self.config.username
)
try:
with open(user_data_lastlocation, 'w') as outfile:
json.dump({'lat': lat, 'lng': lng, 'alt': alt, 'start_position': self.start_position}, outfile)
except IOError as e:
self.logger.info('[x] Error while opening location file: %s' % e)
def emit_forts_event(self,response_dict):
map_objects = response_dict.get(
'responses', {}
).get('GET_MAP_OBJECTS', {})
status = map_objects.get('status', None)
map_cells = []
if status and status == 1:
map_cells = map_objects['map_cells']
if map_cells and len(map_cells):
for cell in map_cells:
if "forts" in cell and len(cell["forts"]):
self.event_manager.emit(
'forts_found',
sender=self,
level='debug',
formatted='Found forts {json}',
data={'json': json.dumps(cell["forts"])}
)
def find_close_cells(self, lat, lng):
cellid = get_cell_ids(lat, lng)
timestamp = [0, ] * len(cellid)
response_dict = self.get_map_objects(lat, lng, timestamp, cellid)
map_objects = response_dict.get(
'responses', {}
).get('GET_MAP_OBJECTS', {})
status = map_objects.get('status', None)
map_cells = []
if status and status == 1:
map_cells = map_objects['map_cells']
position = (lat, lng, 0)
map_cells.sort(
key=lambda x: distance(
lat,
lng,
x['forts'][0]['latitude'],
x['forts'][0]['longitude']) if x.get('forts', []) else 1e6
)
return map_cells
def check_session(self, position):
# Check session expiry
if self.api._auth_provider and self.api._auth_provider._ticket_expire:
# prevent a crash if the returned value is not numeric
if not str(self.api._auth_provider._ticket_expire).isdigit():
self.logger.info("Ticket expired value is not numeric", 'yellow')
remaining_time = \
self.api._auth_provider._ticket_expire / 1000 - time.time()
if remaining_time < 60:
self.event_manager.emit(
'api_error',
sender=self,
level='info',
formatted='Session stale, re-logging in.'
)
self.api = ApiWrapper(config=self.config)
self.api.set_position(*position)
self.login()
#self.api.set_signature_lib(self.get_encryption_lib())
#self.api.set_hash_lib(self.get_hash_lib())
def login(self):
status = {}
self.event_manager.emit(
'login_started',
sender=self,
level='info',
formatted="Login procedure started."
)
lat, lng = self.position[0:2]
self.api.set_position(lat, lng, self.alt) # or should the alt be kept at zero?
try:
self.api.login(
self.config.auth_service,
str(self.config.username),
str(self.config.password))
except AuthException as e:
self.event_manager.emit(
'login_failed',
sender=self,
level='info',
formatted='Login process failed: {}'.format(e)
)
sys.exit()
with self.database as conn:
c = conn.cursor()
c.execute("SELECT COUNT(name) FROM sqlite_master WHERE type='table' AND name='login'")
result = c.fetchone()
if result[0] == 1:
conn.execute('''INSERT INTO login (timestamp, message) VALUES (?, ?)''', (time.time(), 'LOGIN_SUCCESS'))
else:
self.event_manager.emit(
'login_failed',
sender=self,
level='info',
formatted="Login table not founded, skipping log"
)
self.event_manager.emit(
'login_successful',
sender=self,
level='info',
formatted="Login successful."
)
# Start of security, to get various API Versions from different sources
# Get Official API
link = "https://pgorelease.nianticlabs.com/plfe/version"
f = urllib2.urlopen(link)
myfile = f.read()
f.close()
officialAPI = myfile[2:8]
self.event_manager.emit(
'security_check',
sender=self,
level='info',
formatted="Niantic Official API Version: {}".format(officalAPI)
)
link = "https://pokehash.buddyauth.com/api/hash/versions"
f = urllib2.urlopen(link)
myfile = f.read()
f.close()
bossland_hash_endpoint = myfile.split(",")
total_entry = len(bossland_hash_endpoint)
last_bossland_entry = bossland_hash_endpoint[total_entry - 1]
bossland_latestAPI = last_bossland_entry.split(":")[0].replace('\"', '')
hashingAPI_temp = 0
self.event_manager.emit(
'security_check',
sender=self,
level='info',
formatted="Latest Bossland Hashing API Version: {}".format(bossland_lastestAPI)
)
if self.config.check_niantic_api is True:
if HashServer.endpoint == "":
self.event_manager.emit(
'security_check',
sender=self,
level='info',
formatted="Warning: Bot is running on legacy API"
)
else:
PGoAPI_hash_endpoint = HashServer.endpoint.split("com/",1)[1]
PGoAPI_hash_version = []
# Check if PGoAPI hashing is in Bossland versioning
bossland_hash_data = json.loads(myfile)
for version, endpoint in bossland_hash_data.items():
if endpoint == PGoAPI_hash_endpoint:
# Version should always be in this format x.xx.x
# Check total length; if less than 4 digits, pad a zero at the end
if len(version.replace('.','')) < 4:
version = version + ".0"
hashingAPI_temp = int(version.replace('.',''))
# iOS versioning is always more than 1.19.0
if hashingAPI_temp < 1190:
PGoAPI_hash_version.append(version)
# assuming Android versioning is always the last entry
PGoAPI_hash_version.sort(reverse=True)
# convert official API version & hashing API version to numbers
officialAPI_int = int(officialAPI.replace('.',''))
hashingAPI_int = int(PGoAPI_hash_version[0].replace('.',''))
if hashingAPI_int < officialAPI_int:
self.event_manager.emit(
'security_check',
sender=self,
level='info',
formatted="We have detected a Pokemon API Change. Latest Niantic Version is: {}. Program Exiting...".format(officalAPI)
)
sys.exit(1)
else:
self.event_manager.emit(
'security_check',
sender=self,
level='info',
formatted="Current PGoAPI is using API Version: {}. Niantic API Check Pass".format(PGoAPI_hash_version[0])
)
# When successful login, do a captcha check
#Basic Captcha detection, more to come
response_dict = self.api.check_challenge()
captcha_url = response_dict['responses']['CHECK_CHALLENGE']['challenge_url']
if len(captcha_url) > 1:
self.event_manager.emit(
'captcha',
sender=self,
level='critical',
formatted='Captcha Encountered, URL: {}'.format(captcha_url)
)
sys.exit(1)
self.event_manager.emit(
'captcha',
sender=self,
level='info',
formatted="Captcha Check Passed"
)
self.heartbeat()
def get_encryption_lib(self):
if _platform == "Windows" or _platform == "win32":
# Check if we are on 32 or 64 bit
if sys.maxsize > 2**32:
file_name = 'encrypt64.dll'
else:
file_name = 'encrypt32.dll'
if _platform.lower() == "darwin":
file_name = 'libencrypt-osx-64.so'
if _platform.lower() == "linux" or _platform.lower() == "linux2":
file_name = 'libencrypt-linux-x86-64.so'
if self.config.encrypt_location == '':
path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
else:
path = self.config.encrypt_location
full_path = ''
if os.path.isfile(path + '/' + file_name): # check encrypt_location or local dir first
full_path = path + '/' + file_name
elif os.path.isfile(path + '/src/pgoapi/pgoapi/lib/' + file_name): # if not found, check pgoapi lib folder
full_path = path + '/src/pgoapi/pgoapi/lib/' + file_name
if full_path == '':
self.logger.error(file_name + ' is not found! Please place it in the bot\'s root directory or set encrypt_location in config.')
sys.exit(1)
else:
self.logger.info('Found '+ file_name +'! Platform: ' + _platform + ' ' + file_name + ' directory: ' + full_path)
return full_path
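# get_hash_lib below mirrors this search order (encrypt_location / local dir,
# then the bundled pgoapi lib folder), differing only in the library file names.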
def get_hash_lib(self):
if _platform == "Windows" or _platform == "win32":
# Check if we are on 32 or 64 bit
if sys.maxsize > 2**32:
file_name = 'niantichash64.dll'
else:
file_name = 'niantichash32.dll'
if _platform.lower() == "darwin":
file_name = 'libniantichash-macos-64.dylib'
if _platform.lower() == "linux" or _platform.lower() == "linux2":
file_name = 'libniantichash-linux-x86-64.so'
if self.config.encrypt_location == '':
path = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
else:
path = self.config.encrypt_location
full_path = ''
if os.path.isfile(path + '/' + file_name): # check encrypt_location or local dir first
full_path = path + '/' + file_name
elif os.path.isfile(path + '/src/pgoapi/pgoapi/lib/' + file_name): # if not found, check pgoapi lib folder
full_path = path + '/src/pgoapi/pgoapi/lib/' + file_name
if full_path == '':
self.logger.error(file_name + ' is not found! Please place it in the bot\'s root directory or set encrypt_location in config.')
sys.exit(1)
else:
self.logger.info('Found '+ file_name +'! Platform: ' + _platform + ' ' + file_name + ' directory: ' + full_path)
return full_path
def _setup_api(self):
# instantiate pgoapi @var ApiWrapper
self.api = ApiWrapper(config=self.config)
# provide player position on the earth
self._set_starting_position()
self.login()
# chain subrequests (methods) into one RPC call
#self.api.set_signature_lib(self.get_encryption_lib())
#self.api.set_hash_lib(self.get_hash_lib())
self.logger.info('')
# send empty map_cells and then our position
self.update_web_location()
def _print_character_info(self):
# get player profile call
# ----------------------
response_dict = self.api.get_player()
# print('Response dictionary: \n\r{}'.format(json.dumps(response_dict, indent=2)))
currency_1 = "0"
currency_2 = "0"
warn = False
if response_dict:
self._player = response_dict['responses']['GET_PLAYER']['player_data']
if 'warn' in response_dict['responses']['GET_PLAYER']:
warn = response_dict['responses']['GET_PLAYER']['warn']
player = self._player
else:
self.logger.info(
"The API didn't return player info, servers are unstable - "
"retrying.", 'red'
)
sleep(5)
return self._print_character_info()
# @@@ TODO: Convert this to d/m/Y H:M:S
creation_date = datetime.datetime.fromtimestamp(
player['creation_timestamp_ms'] / 1e3)
creation_date = creation_date.strftime("%Y/%m/%d %H:%M:%S")
pokecoins = '0'
stardust = '0'
items_inventory = inventory.items()
if 'amount' in player['currencies'][0]:
pokecoins = player['currencies'][0]['amount']
if 'amount' in player['currencies'][1]:
stardust = player['currencies'][1]['amount']
self.logger.info('')
self.logger.info('--- {username} ---'.format(**player))
self.logger.info(
'Pokemon Bag: {}/{}'.format(
inventory.Pokemons.get_space_used(),
inventory.get_pokemon_inventory_size()
)
)
self.logger.info(
'Items: {}/{}'.format(
inventory.Items.get_space_used(),
inventory.get_item_inventory_size()
)
)
self.logger.info(
'Stardust: {}'.format(stardust) +
' | Pokecoins: {}'.format(pokecoins)
)
# Items Output
self.logger.info(
'PokeBalls: ' + str(items_inventory.get(1).count) +
' | GreatBalls: ' + str(items_inventory.get(2).count) +
' | UltraBalls: ' + str(items_inventory.get(3).count) +
' | MasterBalls: ' + str(items_inventory.get(4).count))
self.logger.info(
'RazzBerries: ' + str(items_inventory.get(701).count) +
' | Nanab Berries: ' + str(items_inventory.get(703).count) +
' | Pinap Berries: ' + str(items_inventory.get(705).count))
self.logger.info(
'LuckyEgg: ' + str(items_inventory.get(301).count) +
' | Incubator: ' + str(items_inventory.get(902).count) +
' | TroyDisk: ' + str(items_inventory.get(501).count))
self.logger.info(
'Potion: ' + str(items_inventory.get(101).count) +
' | SuperPotion: ' + str(items_inventory.get(102).count) +
' | HyperPotion: ' + str(items_inventory.get(103).count) +
' | MaxPotion: ' + str(items_inventory.get(104).count))
self.logger.info(
'Incense: ' + str(items_inventory.get(401).count) +
' | IncenseSpicy: ' + str(items_inventory.get(402).count) +
' | IncenseCool: ' + str(items_inventory.get(403).count))
self.logger.info(
'Revive: ' + str(items_inventory.get(201).count) +
' | MaxRevive: ' + str(items_inventory.get(202).count))
self.logger.info(
'Sun Stone: ' + str(items_inventory.get(1101).count) +
' | Kings Rock: ' + str(items_inventory.get(1102).count) +
' | Metal Coat: ' + str(items_inventory.get(1103).count) +
' | Dragon Scale: ' + str(items_inventory.get(1104).count) +
' | Upgrade: ' + str(items_inventory.get(1105).count))
if warn:
self.logger.info('')
self.event_manager.emit(
'niantic_warning',
sender=self,
level='warning',
formatted="This account has recieved a warning from Niantic. Bot at own risk."
)
sleep(5) # Pause to allow user to see warning
self.logger.info('')
def _print_list_pokemon(self):
# get pokemon list
bag = inventory.pokemons().all()
id_list = list(set(map(lambda x: x.pokemon_id, bag)))
id_list.sort()
pokemon_list = [filter(lambda x: x.pokemon_id == y, bag) for y in id_list]
show_count = self.config.pokemon_bag_show_count
show_candies = self.config.pokemon_bag_show_candies
poke_info_displayed = self.config.pokemon_bag_pokemon_info
def get_poke_info(info, pokemon):
poke_info = {
'cp': 'CP {}'.format(pokemon.cp),
'iv_ads': 'A/D/S {}/{}/{}'.format(pokemon.iv_attack, pokemon.iv_defense, pokemon.iv_stamina),
'iv_pct': 'IV {}'.format(pokemon.iv),
'ivcp': 'IVCP {}'.format(round(pokemon.ivcp,2)),
'ncp': 'NCP {}'.format(round(pokemon.cp_percent,2)),
'level': "Level {}".format(pokemon.level),
'hp': 'HP {}/{}'.format(pokemon.hp, pokemon.hp_max),
'moveset': 'Moves: {}'.format(pokemon.moveset),
'dps': 'DPS {}'.format(round(pokemon.moveset.dps, 2))
}
if info not in poke_info:
raise ConfigException("info '{}' isn't available for displaying".format(info))
return poke_info[info]
self.logger.info('Pokemon:')
for pokes in pokemon_list:
pokes.sort(key=lambda p: p.cp, reverse=True)
line_p = '#{} {}'.format(pokes[0].pokemon_id, pokes[0].name)
if show_count:
line_p += '[{}]'.format(len(pokes))
if show_candies:
line_p += '[{} candies]'.format(pokes[0].candy_quantity)
line_p += ': '
poke_info = ['({})'.format(', '.join([get_poke_info(x, p) for x in poke_info_displayed])) for p in pokes]
self.logger.info(line_p + ' | '.join(poke_info))
self.logger.info('')
def use_lucky_egg(self):
return self.api.use_item_xp_boost(item_id=301)
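# Item id 301 is the Lucky Egg (see the LuckyEgg count in _print_character_info
# above). Sketch of a caller, assuming the standard response envelope used
# elsewhere in this file:
# response = self.use_lucky_egg()
# result = response['responses']['USE_ITEM_XP_BOOST']['result']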
def _set_starting_position(self):
self.event_manager.emit(
'set_start_location',
sender=self,
level='info',
formatted='Setting start location.'
)
has_position = False
if self.config.test:
# TODO: Add unit tests
return
if self.wake_location:
msg = "Wake up location found: {location} {position}"
self.event_manager.emit(
'location_found',
sender=self,
level='info',
formatted=msg,
data={
'location': self.wake_location['raw'],
'position': self.wake_location['coord']
}
)
self.api.set_position(*self.wake_location['coord'])
self.event_manager.emit(
'position_update',
sender=self,
level='info',
formatted="Now at {current_position}",
data={
'current_position': self.position,
'last_position': '',
'distance': '',
'distance_unit': ''
}
)
self.start_position = self.position
has_position = True
return
if self.config.location:
location_str = self.config.location
location = self.get_pos_by_name(location_str.replace(" ", ""))
msg = "Location found: {location} {position}"
self.event_manager.emit(
'location_found',
sender=self,
level='info',
formatted=msg,
data={
'location': location_str,
'position': location
}
)
self.api.set_position(*location)
self.event_manager.emit(
'position_update',
sender=self,
level='info',
formatted="Now at {current_position}",
data={
'current_position': self.position,
'last_position': '',
'distance': '',
'distance_unit': ''
}
)
self.start_position = self.position
has_position = True
if self.config.location_cache:
try:
# save location flag used to pull the last known location from
# the location.json
self.event_manager.emit(
'load_cached_location',
sender=self,
level='debug',
formatted='Loading cached location...'
)
json_file = os.path.join(_base_dir, 'data', 'last-location-%s.json' % self.config.username)
try:
with open(json_file, "r") as infile:
location_json = json.load(infile)
except (IOError, ValueError):
# Unable to read json file.
# File may be corrupt. Create a new one.
location_json = []
except:
raise FileIOException("Unexpected error reading from {}".web_inventory)
location = (
location_json['lat'],
location_json['lng'],
location_json['alt'],
)
# If location has been set in config, only use cache if starting position has not differed
if has_position and 'start_position' in location_json:
last_start_position = tuple(location_json.get('start_position', []))
# Start position has to have been set on a previous run to do this check
if last_start_position and last_start_position != self.start_position:
msg = 'Going to a new place, ignoring cached location.'
self.event_manager.emit(
'location_cache_ignored',
sender=self,
level='debug',
formatted=msg
)
return
self.api.set_position(*location)
self.event_manager.emit(
'position_update',
sender=self,
level='debug',
formatted='Loaded location {current_position} from cache',
data={
'current_position': location,
'last_position': '',
'distance': '',
'distance_unit': ''
}
)
has_position = True
except Exception:
if has_position is False:
sys.exit(
"No cached Location. Please specify initial location."
)
self.event_manager.emit(
'location_cache_error',
sender=self,
level='debug',
formatted='Parsing cached location failed.'
)
def get_pos_by_name(self, location_name):
# Check if given location name, belongs to favorite_locations
favorite_location_coords = self._get_pos_by_fav_location(location_name)
if favorite_location_coords is not None:
return favorite_location_coords
# Check if the given location is already a coordinate.
if ',' in location_name:
possible_coordinates = re.findall(
"[-]?\d{1,3}(?:[.]\d+)?", location_name
)
if len(possible_coordinates) >= 2:
# 2 matches, this must be a coordinate. We'll bypass the Google
# geocode so we keep the exact location.
self.logger.info(
'[x] Coordinates found in passed in location, '
'not geocoding.'
)
return float(possible_coordinates[0]), float(possible_coordinates[1]), (float(possible_coordinates[2]) if len(possible_coordinates) == 3 else self.alt)
geolocator = GoogleV3(api_key=self.config.gmapkey)
loc = geolocator.geocode(location_name, timeout=10)
return float(loc.latitude), float(loc.longitude), float(loc.altitude)
def _get_pos_by_fav_location(self, location_name):
location_name = location_name.lower()
coords = None
for location in self.config.favorite_locations:
if location.get('name').lower() == location_name:
coords = re.findall(
"[-]?\d{1,3}[.]\d{3,7}", location.get('coords').strip()
)
if len(coords) >= 2:
self.logger.info('Favorite location found: {} ({})'.format(location_name, coords))
break
# TODO: This is really bad
if coords is None:
return coords
else:
return float(coords[0]), float(coords[1]), (float(coords[2]) if len(coords) == 3 else self.alt)
def heartbeat(self):
# Remove forts that we can now spin again.
now = time.time()
self.fort_timeouts = {id: timeout for id, timeout
in self.fort_timeouts.iteritems()
if timeout >= now * 1000}
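# Note: fort_timeouts values are epoch milliseconds while time.time() is in
# seconds, hence the now * 1000 comparison above.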
if now - self.last_heartbeat >= self.heartbeat_threshold and not self.hb_locked:
self.last_heartbeat = now
request = self.api.create_request()
request.get_player()
request.check_awarded_badges()
responses = None
try:
responses = request.call()
except NotLoggedInException:
self.logger.warning('Unable to login, retrying')
except:
self.logger.warning('Error occurred in heartbeat, retrying')
if responses and responses['responses']['GET_PLAYER']['success'] == True:
# we get the player_data anyway, might as well store it
self._player = responses['responses']['GET_PLAYER']['player_data']
self.event_manager.emit(
'player_data',
sender=self,
level='debug',
formatted='player_data: {player_data}',
data={'player_data': self._player}
)
if responses and responses['responses']['CHECK_AWARDED_BADGES']['success'] == True:
# store the awarded_badges response to be used in a task or as part of heartbeat
self._awarded_badges = responses['responses']['CHECK_AWARDED_BADGES']
if 'awarded_badges' in self._awarded_badges:
i = 0
for badge in self._awarded_badges['awarded_badges']:
badgelevel = self._awarded_badges['awarded_badge_levels'][i]
badgename = badge_type_pb2._BADGETYPE.values_by_number[badge].name
i += 1
self.event_manager.emit(
'badges',
sender=self,
level='info',
formatted='awarded badge: {badge}, lvl {level}',
data={'badge': badgename,
'level': badgelevel}
)
human_behaviour.action_delay(3, 10)
try:
self.web_update_queue.put_nowait(True) # do this outside of thread every tick
except Queue.Full:
pass
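# Re-arm the heartbeat: every call schedules the next one via a one-shot
# Timer, so heartbeats keep firing roughly every heartbeat_threshold seconds.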
threading.Timer(self.heartbeat_threshold, self.heartbeat).start()
def update_web_location_worker(self):
while True:
self.web_update_queue.get()
self.update_web_location()
def display_player_info(self):
player_stats = player()
if player_stats:
nextlvlxp = (int(player_stats.next_level_xp) - int(player_stats.exp))
self.logger.info(
'Level: {}'.format(player_stats.level) +
' (Next Level: {} XP)'.format(nextlvlxp) +
' (Total: {} XP)'
''.format(player_stats.exp))
self.logger.info(
'Pokemon Captured: '
'{}'.format(player_stats.pokemons_captured) +
' | Pokestops Visited: '
'{}'.format(player_stats.poke_stop_visits))
def get_forts(self, order_by_distance=False):
forts = [fort
for fort in self.cell['forts']
if 'latitude' in fort and 'type' in fort]
if order_by_distance:
forts.sort(key=lambda x: distance(
self.position[0],
self.position[1],
x['latitude'],
x['longitude']
))
return forts
def get_map_objects(self, lat, lng, timestamp, cellid):
if time.time() - self.last_time_map_object < self.config.map_object_cache_time:
return self.last_map_object
self.last_map_object = self.api.get_map_objects(
latitude=f2i(lat),
longitude=f2i(lng),
since_timestamp_ms=timestamp,
cell_id=cellid
)
self.emit_forts_event(self.last_map_object)
#if self.last_map_object:
# print self.last_map_object
self.last_time_map_object = time.time()
return self.last_map_object
def _load_recent_forts(self):
if not self.config.forts_cache_recent_forts:
return
cached_forts_path = os.path.join(_base_dir, 'data', 'recent-forts-%s.json' % self.config.username)
try:
# load the cached recent forts
cached_recent_forts = []
try:
with open(cached_forts_path) as f:
cached_recent_forts = json.load(f)
except (IOError, ValueError) as e:
self.logger.info('[x] Error while opening cached forts: %s' % e)
except:
raise FileIOException("Unexpected error opening {}".cached_forts_path)
num_cached_recent_forts = len(cached_recent_forts)
num_recent_forts = len(self.recent_forts)
# Handles changes in max_circle_size
if not num_recent_forts:
self.recent_forts = []
elif num_recent_forts > num_cached_recent_forts:
self.recent_forts[-num_cached_recent_forts:] = cached_recent_forts
elif num_recent_forts < num_cached_recent_forts:
self.recent_forts = cached_recent_forts[-num_recent_forts:]
else:
self.recent_forts = cached_recent_forts
self.event_manager.emit(
'loaded_cached_forts',
sender=self,
level='debug',
formatted='Loaded cached forts...'
)
except IOError:
self.event_manager.emit(
'no_cached_forts',
sender=self,
level='debug',
formatted='Starting new cached forts for {path}',
data={'path': cached_forts_path}
)
def _refresh_inventory(self):
# Perform inventory update every n seconds
now = time.time()
if now - self.last_inventory_refresh >= self.inventory_refresh_threshold:
inventory.refresh_inventory()
self.last_inventory_refresh = now
self.inventory_refresh_counter += 1
| {
"content_hash": "079708437a47d9f9bfd1d1ee7b497c1d",
"timestamp": "",
"source": "github",
"line_count": 1691,
"max_line_length": 188,
"avg_line_length": 36.32052040212892,
"alnum_prop": 0.5214920707284509,
"repo_name": "heihachi/PokemonGo-Bot",
"id": "fa23738745bbfeeaf007ed196e8ede0899abd58f",
"size": "61442",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "pokemongo_bot/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "27071"
},
{
"name": "CSS",
"bytes": "1519"
},
{
"name": "HTML",
"bytes": "5645"
},
{
"name": "JavaScript",
"bytes": "317991"
},
{
"name": "Python",
"bytes": "671128"
},
{
"name": "Shell",
"bytes": "7071"
}
],
"symlink_target": ""
} |
import numpy
from chainer import cuda
from chainer import function
from chainer.utils import type_check
class CReLU(function.Function):
"""Concatenated Rectified Linear Unit."""
def __init__(self, axis=1):
if not isinstance(axis, int):
raise TypeError('axis must be an integer value')
self.axis = axis
def check_type_forward(self, in_types):
type_check.expect(
in_types.size() == 1,
in_types[0].dtype == numpy.float32,
in_types[0].ndim > self.axis,
in_types[0].ndim >= -self.axis
)
def get_output_shape(self, input_shape):
output_shape = list(input_shape)
output_shape[self.axis] *= 2
return tuple(output_shape)
def forward(self, x):
x, = x
xp = cuda.get_array_module(x)
y = xp.empty(self.get_output_shape(x.shape), dtype=numpy.float32)
y_former, y_latter = xp.split(y, 2, axis=self.axis)
zero = x.dtype.type(0)
xp.maximum(zero, x, out=y_former)
xp.maximum(zero, -x, out=y_latter)
return y,
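# Note: xp.split returns views into y, so the two maximum(..., out=...) calls
# above fill the concatenated output in place without an extra copy.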
def backward(self, x, gy):
x, = x
xp = cuda.get_array_module(x)
gy, = gy
gy_former, gy_latter = xp.split(gy, 2, axis=self.axis)
return gy_former * (x > 0) - gy_latter * (-x > 0),
def crelu(x, axis=1):
"""Concatenated Rectified Linear Unit function.
This function is expressed as :math:`f(x) = (\\max(0, x), \\max(0, -x))`,
where two output values are concatenated along an axis.
See: http://arxiv.org/abs/1603.05201
Args:
x (~chainer.Variable): Input variable.
axis (int): Axis that the output values are concatenated along
Returns:
~chainer.Variable: Output variable.
"""
return CReLU(axis=axis)(x)
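# Minimal usage sketch (not part of the original module): crelu doubles the
# size of the chosen axis, so a (2, 2) input becomes (2, 4) with axis=1.
#
# import numpy as np
# from chainer import Variable
# x = Variable(np.array([[-1., 2.], [3., -4.]], dtype=np.float32))
# y = crelu(x, axis=1) # y.data == [[0, 2, 1, 0], [3, 0, 0, 4]]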
| {
"content_hash": "bff667313ec1fd57174e806671d67511",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 77,
"avg_line_length": 28.234375,
"alnum_prop": 0.5866076369673492,
"repo_name": "kikusu/chainer",
"id": "f9e9a522104acc480edb5cbbdee30cb8f2b23a7b",
"size": "1807",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "chainer/functions/activation/crelu.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "C",
"bytes": "29678"
},
{
"name": "Cuda",
"bytes": "6634"
},
{
"name": "PowerShell",
"bytes": "7195"
},
{
"name": "Python",
"bytes": "1896434"
}
],
"symlink_target": ""
} |
import sys, traceback
import os
import logging
import csv
import qbo
import airbitzwallets
# If only utility script is called
if len(sys.argv) <= 1:
sys.exit("Usage: python %s <options> <csvfiles>\n"
"Where possible options include:\n"
" -btc Output bitcoin in full BTC denomination\n"
" -mbtc Output bitcoin in mBTC denomination\n"
" -bits Output bitcoin in bits (uBTC) denomination" % sys.argv[0]
)
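# Example invocation (illustrative file names):
# python csvtoqbo.py -mbtc wallet-january.csv wallet-february.csv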
# If help is requested
elif (sys.argv[1] == '--help'):
sys.exit("Help for %s not yet implemented." % sys.argv[0])
# Test for valid options, instantiate appropriate provider object
if sys.argv[1] == '-mbtc':
denom = 1000
elif sys.argv[1] == '-btc':
denom = 1
elif sys.argv[1] == '-bits':
denom = 1000000
else:
sys.exit("Unknown option '%s'. Run %s without arguments for usage." % (sys.argv[1], sys.argv[0]))
myProvider = airbitzwallets.airbitzwallets()
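# denom is the multiplier from BTC into the chosen denomination
# (1 BTC = 1000 mBTC = 1,000,000 bits); qbo.addTransaction below is
# assumed to scale amounts by it.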
# For each CSV file listed for conversion
for arg in sys.argv[2:]:
try:
with open(arg[:len(arg)-3] + 'log'):
os.remove(arg[:len(arg)-3] + 'log')
except IOError:
pass
logging.basicConfig(filename=arg[:len(arg)-3] + 'log', level=logging.INFO)
logging.info("Opening '%s' CSV File" % myProvider.getName())
try:
with open(arg, 'r') as csvfile:
# Open CSV for reading
reader = csv.DictReader(csvfile, delimiter=',', quotechar='"')
# instantiate the qbo object
myQbo = qbo.qbo()
txnCount = 0
for row in reader:
txnCount = txnCount+1
sdata = str(row)
#read in values from row of csv file
date_posted = myProvider.getDatePosted(myProvider,row)
txn_memo = myProvider.getTxnMemo(myProvider,row)
txn_amount = myProvider.getTxnAmount(myProvider,row)
txn_curamt = myProvider.getTxnCurAmt(myProvider,row)
txn_category = myProvider.getTxnCategory(myProvider,row)
txn_id = myProvider.getTxnId(myProvider,row)
name = myProvider.getTxnName(myProvider,row)
try:
#Add transaction to the qbo document
if myQbo.addTransaction(denom, date_posted, txn_memo, txn_id, txn_amount, txn_curamt, txn_category, name):
print('Transaction [' + str(txnCount) + '] added successfully!')
logging.info('Transaction [' + str(txnCount) + '] added successfully!')
except:
#Error adding transaction
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
print(''.join('!! ' + line for line in lines))
logging.info("Transaction [" + str(txnCount) + "] excluded!")
logging.info('>> Data: ' + str(sdata))
pass
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
print(''.join('!! ' + line for line in lines))
logging.info("Trouble reading CSV file!")
# After transactions have been read, write full QBO document to file
try:
filename = arg[:len(arg)-3] + 'qbo'
if myQbo.Write('./'+ filename):
print("QBO file written successfully!")
#log successful write
logging.info("QBO file %s written successfully!" % filename)
except:
#IO Error
exc_type, exc_value, exc_traceback = sys.exc_info()
lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
print(''.join('!! ' + line for line in lines))
logging.info(''.join('!! ' + line for line in lines))
| {
"content_hash": "a18b2f1948bb7ad66f63226c08d1c271",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 154,
"avg_line_length": 39.705882352941174,
"alnum_prop": 0.5451851851851852,
"repo_name": "Airbitz/airbitz-ofx",
"id": "ae75ff7994410f7e88a0e941f01acf2c32ca349b",
"size": "4676",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "csvtoqbo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "16366"
}
],
"symlink_target": ""
} |
from plenum.test import waits
from plenum.test.node_request.helper import sdk_ensure_pool_functional
from plenum.test.restart.helper import get_group, restart_nodes
nodeCount = 7
def test_restart_groups_6_of_7_wp_tm(looper, txnPoolNodeSet, tconf, tdir,
sdk_pool_handle, sdk_wallet_client, allPluginsPath):
tm = tconf.ToleratePrimaryDisconnection + waits.expectedPoolElectionTimeout(len(txnPoolNodeSet))
restart_group = get_group(txnPoolNodeSet, 6, include_primary=True)
restart_nodes(looper, txnPoolNodeSet, restart_group, tconf, tdir, allPluginsPath,
after_restart_timeout=tm, start_one_by_one=True)
sdk_ensure_pool_functional(looper, txnPoolNodeSet, sdk_wallet_client, sdk_pool_handle)
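# Reading of the test name (inferred from the code above): restart 6 of the 7
# nodes including the primary ("wp"), one by one, giving each restarted node a
# timeout ("tm") long enough to tolerate primary disconnection plus a pool
# election, then verify the pool still processes requests.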
| {
"content_hash": "7bd88b5a78bce80bb73a2c2a362f56e4",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 100,
"avg_line_length": 47.75,
"alnum_prop": 0.7329842931937173,
"repo_name": "evernym/zeno",
"id": "12c50b74b5df0dfd60c8d193112329ec1f3fc316",
"size": "764",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "plenum/test/restart/test_restart_nodes_6_wp.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "531061"
}
],
"symlink_target": ""
} |
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
__version__ = "0.1.0"
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = "1.6.3"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
]
# autodoc/autosummary flags
autoclass_content = "both"
autodoc_default_flags = ["members"]
autosummary_generate = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# Allow markdown includes (so releases.md can include CHANGELOG.md)
# http://www.sphinx-doc.org/en/master/markdown.html
source_parsers = {".md": "recommonmark.parser.CommonMarkParser"}
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = [".rst", ".md"]
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = u"google-cloud-phishingprotection"
copyright = u"2017, Google"
author = u"Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = __version__
# The short X.Y version.
version = ".".join(release.split(".")[0:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "alabaster"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"description": "Google Cloud Client Libraries for Python",
"github_user": "googleapis",
"github_repo": "google-cloud-python",
"github_banner": True,
"font_family": "'Roboto', Georgia, sans",
"head_font_family": "'Roboto', Georgia, serif",
"code_font_family": "'Roboto Mono', 'Consolas', monospace",
}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "google-cloud-phishingprotection-doc"
# -- Options for warnings ------------------------------------------------------
suppress_warnings = [
# Temporarily suppress this to avoid "more than one target found for
# cross-reference" warning, which are intractable for us to avoid while in
# a mono-repo.
# See https://github.com/sphinx-doc/sphinx/blob
# /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843
"ref.python"
]
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(
master_doc,
"google-cloud-phishingprotection.tex",
u"google-cloud-phishingprotection Documentation",
author,
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
master_doc,
"google-cloud-phishingprotection",
u"google-cloud-phishingprotection Documentation",
[author],
1,
)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"google-cloud-phishingprotection",
u"google-cloud-phishingprotection Documentation",
author,
"google-cloud-phishingprotection",
"GAPIC library for the {metadata.shortName} v1beta1 service",
"APIs",
)
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
"python": ("http://python.readthedocs.org/en/latest/", None),
"gax": ("https://gax-python.readthedocs.org/en/latest/", None),
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
"google-gax": ("https://gax-python.readthedocs.io/en/latest/", None),
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest", None),
"grpc": ("https://grpc.io/grpc/python/", None),
"requests": ("https://requests.kennethreitz.org/en/master/", None),
"fastavro": ("https://fastavro.readthedocs.io/en/stable/", None),
"pandas": ("https://pandas.pydata.org/pandas-docs/stable/", None),
}
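# With the mapping above, cross-references such as :class:`requests.Session`
# or :mod:`grpc` resolve to the external projects' hosted documentation.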
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
| {
"content_hash": "6f2aac67b97c557b66d41fd727b08b21",
"timestamp": "",
"source": "github",
"line_count": 350,
"max_line_length": 86,
"avg_line_length": 33.18571428571428,
"alnum_prop": 0.6911752044769695,
"repo_name": "tseaver/google-cloud-python",
"id": "0c995e96c6084faf297e6634c0be70b65dacc3d3",
"size": "11998",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "phishingprotection/docs/conf.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "1094"
},
{
"name": "Python",
"bytes": "30519057"
},
{
"name": "Shell",
"bytes": "9148"
}
],
"symlink_target": ""
} |
from __future__ import division
from lxml import etree
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import datetime
import requests
import zipfile
import os
import runProcs
# get_ipython().magic('matplotlib inline')
# In[14]:
# 0. Define a functions for managing parsed z1 data
# 0.1 Create a searchable legend in csv format
def createLegend(root):
legend_df = pd.DataFrame(columns=['Series Name','Description','Frequency','Start','End'])
x = 0
for levelA in root:
for levelB in levelA[4:-1]:
entry = []
ident = levelB.get('SERIES_NAME')
freq = ident[-1]
entry.append(ident)
for levelC in levelB:
for n,levelD in enumerate(levelC):
if n == 0:
entry.append(levelD[1].text)
entry.append(freq)
for levelC in [levelB[1],levelB[-1]]:
d= levelC.get('TIME_PERIOD')
d= datetime.datetime.strptime(d, '%Y-%m-%d').strftime('%m-%d-%Y')
entry.append(d)
legend_df.loc[x] = entry
x+=1
legend_df.to_csv('z1Legend.csv')
return legend_df
# 0.2 Create a Pandas dataframe for the z1 series with code: name
def getSeries(name):
dates = []
value= np.array([])
for levelA in root:
for levelB in levelA:
ident = levelB.get('SERIES_NAME')
if ident in [name]:
for levelC in levelB:
for n,levelD in enumerate(levelC):
if n == 0:
description = levelD[1].text
for levelC in levelB[1:]:
v = levelC.get('OBS_VALUE')
d= levelC.get('TIME_PERIOD')
dates = np.append(dates,d)
value = np.append(value,float(v))
for n,d in enumerate(dates):
dates[n]= datetime.datetime.strptime(d,'%Y-%m-%d').strftime('%m-%d-%Y')
df = pd.DataFrame(value,index=dates,columns = [description])
return df
# 0.3 Create a Pandas dataframe for the z1 series with codes in dataList
def getDataSet(dataList):
df = pd.DataFrame([])
for name in dataList:
newSeries=getSeries(name)
df = pd.concat([df,newSeries],axis=1, join_axes=[newSeries.index])
return df
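# Example usage (a minimal sketch; requires `root`, which is defined by the
# parsing step below). 'FL313161113.A' is the T-bill series plotted later;
# any other series name listed in z1Legend.csv can be substituted.
# df = getDataSet(['FL313161113.A'])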
# In[15]:
# 1. Download .zip file and extract .xml (and other contents if necessary)
# 1.1 download
url = "http://www.federalreserve.gov/datadownload/Output.aspx?rel=Z1&filetype=zip"
file_name = "FRB_Z1.zip"
u = requests.get(url, stream=True) # stream so the zip is not held entirely in memory
with open(file_name, 'wb') as f:
for chunk in u.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
# 1.2 extract
fh = open('FRB_Z1.zip', 'rb')
z = zipfile.ZipFile(fh)
for name in ['Z1_data.xml']:
outpath = ""
z.extract(name, outpath)
fh.close()
# with zipfile.ZipFile('FRB_Z1.zip', "r") as z:
# z.extractall()
# 1.3 Remove the zip file
os.remove('FRB_Z1.zip')
# In[16]:
# 2. Import the xml data and create a legend
# 2.2 parse
tree = etree.parse("Z1_data.xml")
root = tree.getroot()
# 2.2 create a legend in csv format
# legend= createLegend(root)
# In[17]:
# 3. Sample plot: US T-bill volume
tBills = getSeries('FL313161113.A')
tBills.plot(x_compat=True)
# In[18]:
# 4. export the notebook
runProcs.exportNb('z1data')
| {
"content_hash": "d32dc45eac983b19c9d21f91fdf7cb74",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 93,
"avg_line_length": 25.02158273381295,
"alnum_prop": 0.5784933870040253,
"repo_name": "letsgoexploring/economicData",
"id": "993550087ac0edc8b1927ebbe4dbadc9adc371b5",
"size": "3507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "z1data/z1data.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "130"
},
{
"name": "HTML",
"bytes": "441727"
},
{
"name": "Jupyter Notebook",
"bytes": "5008103"
},
{
"name": "Python",
"bytes": "146725"
}
],
"symlink_target": ""
} |
import os
import sys
import platform
from distutils.version import LooseVersion
def get_dist():
try:
return platform.linux_distribution()
except AttributeError: # Python < 2.6 has no linux_distribution(); fall back
return platform.dist()
def deplist(packages=[]):
''' This method only really works for yum versions of fedora
Takes a single argument, list of package names. They can be yum names, or
rpm filenames
Returns two lists, first a list of dependency strings suitable for the
"yum install" command, and a second set of strings not currently in the
repo, known as the local dependencies.'''
sys.path.insert(0, '/usr/share/yum-cli') #Works in centos 7 at least
import cli
import yum
base = cli.YumBaseCli()
pkgs = []
for package in packages:
if (package.endswith('.rpm') and (yum.misc.re_remote_url(package) or
os.path.exists(package))):
thispkg = yum.packages.YumUrlPackage(base, base.ts, package)
pkgs.append(thispkg)
elif base.conf.showdupesfromrepos:
pkgs.extend(base.pkgSack.returnPackages(patterns=[package],
ignore_case=True))
else:
try:
pkgs.extend(base.pkgSack.returnNewestByName(patterns=[package],
ignore_case=True))
except yum.Errors.PackageSackError:
pass
results = base.findDeps(pkgs)
deps = []
local_deps = []
for pkg in pkgs:
#print 'Package', pkg.compactPrint(), 'needs'
result = results[pkg]
for dep in result.keys():
if not results[pkg][dep]:
#print 'Dep not found:', yum.misc.prco_tuple_to_string(dep)
local_deps.append(yum.misc.prco_tuple_to_string(dep))
else:
#print 'Dep found:', yum.misc.prco_tuple_to_string(dep)
deps.append(yum.misc.prco_tuple_to_string(dep))
return deps, local_deps
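# Example (a minimal sketch; 'httpd' is an arbitrary illustrative name --
# any yum package name or local .rpm path works):
# deps, local_deps = deplist(['httpd'])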
def main(dep_filename='deps.txt', local_dep_filename='local_deps.txt',
packages=[]):
(os_name, os_version, os_id) = get_dist()
os_name = os_name.lower()
os_version = LooseVersion(os_version)
deps, local_deps = [], [] # defaults so unsupported platforms write empty files
if os_name.startswith('redhat') or os_name.startswith('centos'):
if os_version >= LooseVersion('7'):
(deps, local_deps) = deplist(packages)
elif os_version >= LooseVersion('6'):
pass
elif os_version >= LooseVersion('5'):
pass
if os_name.startswith('fedora'):
pass
if os_name.startswith('suse'):
pass
fid = open(dep_filename, 'w')
local_fid = open(local_dep_filename, 'w')
#deps should have both local and repo dependencies
deps = deps + local_deps
deps.sort()
local_deps.sort()
for dep in deps:
fid.write(dep+'\n')
for dep in local_deps:
local_fid.write(dep+'\n')
if __name__=='__main__':
#No argparse for now... Python 2.4 compat ;(
main(sys.argv[1], sys.argv[2], sys.argv[3:]) | {
"content_hash": "aefc052c6bd19f03238468123237580e",
"timestamp": "",
"source": "github",
"line_count": 95,
"max_line_length": 79,
"avg_line_length": 29.88421052631579,
"alnum_prop": 0.6276858048608664,
"repo_name": "andyneff/dockrpm",
"id": "b8cadbc41768ef5052c498a75752c95e264d0c79",
"size": "2861",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "deplist.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2274"
},
{
"name": "Python",
"bytes": "41830"
},
{
"name": "Shell",
"bytes": "1140"
}
],
"symlink_target": ""
} |
"""Volume copy throttling helpers."""
import contextlib
from oslo_concurrency import processutils
from oslo_log import log as logging
from cinder import exception
from cinder import utils
LOG = logging.getLogger(__name__)
class Throttle(object):
"""Base class for throttling disk I/O bandwidth"""
DEFAULT = None
@staticmethod
def set_default(throttle):
Throttle.DEFAULT = throttle
@staticmethod
def get_default():
return Throttle.DEFAULT or Throttle()
def __init__(self, prefix=None):
self.prefix = prefix or []
@contextlib.contextmanager
def subcommand(self, srcpath, dstpath):
"""Sub-command that reads from srcpath and writes to dstpath.
Throttle disk I/O bandwidth used by a sub-command, such as 'dd',
that reads from srcpath and writes to dstpath. The sub-command
must be executed with the generated prefix command.
"""
yield {'prefix': self.prefix}
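# A minimal usage sketch (device paths are illustrative): a caller wraps the
# copy command, such as 'dd', with the generated prefix, e.g.
# throttle = Throttle.get_default()
# with throttle.subcommand('/dev/src', '/dev/dst') as throttle_cmd:
#     cmd = throttle_cmd['prefix'] + ['dd', 'if=/dev/src', 'of=/dev/dst']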
class BlkioCgroup(Throttle):
"""Throttle disk I/O bandwidth using blkio cgroups."""
def __init__(self, bps_limit, cgroup_name):
self.bps_limit = bps_limit
self.cgroup = cgroup_name
self.srcdevs = {}
self.dstdevs = {}
try:
utils.execute('cgcreate', '-g', 'blkio:%s' % self.cgroup,
run_as_root=True)
except processutils.ProcessExecutionError:
LOG.error('Failed to create blkio cgroup \'%(name)s\'.',
{'name': cgroup_name})
raise
def _get_device_number(self, path):
try:
return utils.get_blkdev_major_minor(path)
except exception.Error as e:
LOG.error('Failed to get device number for throttling: '
'%(error)s', {'error': e})
def _limit_bps(self, rw, dev, bps):
try:
utils.execute('cgset', '-r', 'blkio.throttle.%s_bps_device=%s %d'
% (rw, dev, bps), self.cgroup, run_as_root=True)
except processutils.ProcessExecutionError:
LOG.warning('Failed to setup blkio cgroup to throttle the '
'device \'%(device)s\'.', {'device': dev})
def _set_limits(self, rw, devs):
total = sum(devs.values())
for dev in sorted(devs):
self._limit_bps(rw, dev, self.bps_limit * devs[dev] / total)
@utils.synchronized('BlkioCgroup')
def _inc_device(self, srcdev, dstdev):
if srcdev:
self.srcdevs[srcdev] = self.srcdevs.get(srcdev, 0) + 1
self._set_limits('read', self.srcdevs)
if dstdev:
self.dstdevs[dstdev] = self.dstdevs.get(dstdev, 0) + 1
self._set_limits('write', self.dstdevs)
@utils.synchronized('BlkioCgroup')
def _dec_device(self, srcdev, dstdev):
if srcdev:
self.srcdevs[srcdev] -= 1
if self.srcdevs[srcdev] == 0:
del self.srcdevs[srcdev]
self._set_limits('read', self.srcdevs)
if dstdev:
self.dstdevs[dstdev] -= 1
if self.dstdevs[dstdev] == 0:
del self.dstdevs[dstdev]
self._set_limits('write', self.dstdevs)
@contextlib.contextmanager
def subcommand(self, srcpath, dstpath):
srcdev = self._get_device_number(srcpath)
dstdev = self._get_device_number(dstpath)
if srcdev is None and dstdev is None:
yield {'prefix': []}
return
self._inc_device(srcdev, dstdev)
try:
yield {'prefix': ['cgexec', '-g', 'blkio:%s' % self.cgroup]}
finally:
self._dec_device(srcdev, dstdev)
| {
"content_hash": "66e2b89ee0c854f27128c00628667609",
"timestamp": "",
"source": "github",
"line_count": 115,
"max_line_length": 77,
"avg_line_length": 31.965217391304346,
"alnum_prop": 0.5783460282916213,
"repo_name": "Datera/cinder",
"id": "39cbbeb990042d0870aa207ac66ff097172a394f",
"size": "4322",
"binary": false,
"copies": "4",
"ref": "refs/heads/datera_queens_backport",
"path": "cinder/volume/throttling.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "15242306"
},
{
"name": "Shell",
"bytes": "8187"
}
],
"symlink_target": ""
} |
from django.db import models, migrations
import open_humans.models
class Migration(migrations.Migration):
dependencies = [
('open_humans', '0009_random_member_id'),
]
operations = [
migrations.AlterField(
model_name='member',
name='member_id',
field=models.CharField(default=open_humans.models.random_member_id, unique=True, max_length=8),
preserve_default=True,
),
]
| {
"content_hash": "746f52dbec9ffaa91254af33f3234985",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 107,
"avg_line_length": 25.555555555555557,
"alnum_prop": 0.6130434782608696,
"repo_name": "PersonalGenomesOrg/open-humans",
"id": "4ff61730c1d7ef75b85885d7f33e1fa3d429605d",
"size": "485",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "open_humans/migrations/0010_auto_20150311_1922.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "19829"
},
{
"name": "HTML",
"bytes": "296839"
},
{
"name": "JavaScript",
"bytes": "25622"
},
{
"name": "Python",
"bytes": "435909"
},
{
"name": "Shell",
"bytes": "721"
}
],
"symlink_target": ""
} |
"""
Settings and configuration for Django.
Read values from the module specified by the DJANGO_SETTINGS_MODULE environment
variable, and then from django.conf.global_settings; see the global_settings.py
for a list of all possible variables.
"""
import importlib
import os
import time
import traceback
import warnings
from pathlib import Path
import django
from django.conf import global_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.functional import LazyObject, empty
ENVIRONMENT_VARIABLE = "DJANGO_SETTINGS_MODULE"
# RemovedInDjango50Warning
USE_DEPRECATED_PYTZ_DEPRECATED_MSG = (
'The USE_DEPRECATED_PYTZ setting, and support for pytz timezones is '
'deprecated in favor of the stdlib zoneinfo module. Please update your '
'code to use zoneinfo and remove the USE_DEPRECATED_PYTZ setting.'
)
USE_L10N_DEPRECATED_MSG = (
'The USE_L10N setting is deprecated. Starting with Django 5.0, localized '
'formatting of data will always be enabled. For example Django will '
'display numbers and dates using the format of the current locale.'
)
class SettingsReference(str):
"""
String subclass which references a current settings value. It's treated as
the value in memory but serializes to a settings.NAME attribute reference.
"""
def __new__(self, value, setting_name):
return str.__new__(self, value)
def __init__(self, value, setting_name):
self.setting_name = setting_name
class LazySettings(LazyObject):
"""
A lazy proxy for either global Django settings or a custom settings object.
The user can manually configure settings prior to using them. Otherwise,
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
"""
def _setup(self, name=None):
"""
Load the settings module pointed to by the environment variable. This
is used the first time settings are needed, if the user hasn't
configured settings manually.
"""
settings_module = os.environ.get(ENVIRONMENT_VARIABLE)
if not settings_module:
desc = ("setting %s" % name) if name else "settings"
raise ImproperlyConfigured(
"Requested %s, but settings are not configured. "
"You must either define the environment variable %s "
"or call settings.configure() before accessing settings."
% (desc, ENVIRONMENT_VARIABLE))
self._wrapped = Settings(settings_module)
def __repr__(self):
# Hardcode the class name as otherwise it yields 'Settings'.
if self._wrapped is empty:
return '<LazySettings [Unevaluated]>'
return '<LazySettings "%(settings_module)s">' % {
'settings_module': self._wrapped.SETTINGS_MODULE,
}
def __getattr__(self, name):
"""Return the value of a setting and cache it in self.__dict__."""
if self._wrapped is empty:
self._setup(name)
val = getattr(self._wrapped, name)
# Special case some settings which require further modification.
# This is done here for performance reasons so the modified value is cached.
if name in {'MEDIA_URL', 'STATIC_URL'} and val is not None:
val = self._add_script_prefix(val)
elif name == 'SECRET_KEY' and not val:
raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")
self.__dict__[name] = val
return val
def __setattr__(self, name, value):
"""
Set the value of setting. Clear all cached values if _wrapped changes
(@override_settings does this) or clear single values when set.
"""
if name == '_wrapped':
self.__dict__.clear()
else:
self.__dict__.pop(name, None)
super().__setattr__(name, value)
def __delattr__(self, name):
"""Delete a setting and clear it from cache if needed."""
super().__delattr__(name)
self.__dict__.pop(name, None)
def configure(self, default_settings=global_settings, **options):
"""
Called to manually configure the settings. The 'default_settings'
parameter sets where to retrieve any unspecified values from (its
argument must support attribute access (__getattr__)).
"""
if self._wrapped is not empty:
raise RuntimeError('Settings already configured.')
holder = UserSettingsHolder(default_settings)
for name, value in options.items():
if not name.isupper():
raise TypeError('Setting %r must be uppercase.' % name)
setattr(holder, name, value)
self._wrapped = holder
@staticmethod
def _add_script_prefix(value):
"""
Add SCRIPT_NAME prefix to relative paths.
Useful when the app is being served at a subpath and manually prefixing
subpath to STATIC_URL and MEDIA_URL in settings is inconvenient.
"""
# Don't apply prefix to absolute paths and URLs.
if value.startswith(('http://', 'https://', '/')):
return value
from django.urls import get_script_prefix
return '%s%s' % (get_script_prefix(), value)
@property
def configured(self):
"""Return True if the settings have already been configured."""
return self._wrapped is not empty
@property
def USE_L10N(self):
stack = traceback.extract_stack()
# Show a warning if the setting is used outside of Django.
# Stack index: -1 this line, -2 the caller.
filename, _, _, _ = stack[-2]
if not filename.startswith(os.path.dirname(django.__file__)):
warnings.warn(
USE_L10N_DEPRECATED_MSG,
RemovedInDjango50Warning,
stacklevel=2,
)
return self.__getattr__('USE_L10N')
# RemovedInDjango50Warning.
@property
def _USE_L10N_INTERNAL(self):
# Special hook to avoid checking a traceback in internal use on hot
# paths.
return self.__getattr__('USE_L10N')
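# A minimal sketch of manual configuration, e.g. for standalone scripts:
# from django.conf import settings
# settings.configure(DEBUG=True)  # settings.configured is True afterwards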
class Settings:
def __init__(self, settings_module):
# update this dict from global settings (but only for ALL_CAPS settings)
for setting in dir(global_settings):
if setting.isupper():
setattr(self, setting, getattr(global_settings, setting))
# store the settings module in case someone later cares
self.SETTINGS_MODULE = settings_module
mod = importlib.import_module(self.SETTINGS_MODULE)
tuple_settings = (
'ALLOWED_HOSTS',
"INSTALLED_APPS",
"TEMPLATE_DIRS",
"LOCALE_PATHS",
)
self._explicit_settings = set()
for setting in dir(mod):
if setting.isupper():
setting_value = getattr(mod, setting)
if (setting in tuple_settings and
not isinstance(setting_value, (list, tuple))):
raise ImproperlyConfigured("The %s setting must be a list or a tuple." % setting)
setattr(self, setting, setting_value)
self._explicit_settings.add(setting)
if self.USE_TZ is False and not self.is_overridden('USE_TZ'):
warnings.warn(
'The default value of USE_TZ will change from False to True '
'in Django 5.0. Set USE_TZ to False in your project settings '
'if you want to keep the current default behavior.',
category=RemovedInDjango50Warning,
)
if self.is_overridden('USE_DEPRECATED_PYTZ'):
warnings.warn(USE_DEPRECATED_PYTZ_DEPRECATED_MSG, RemovedInDjango50Warning)
if hasattr(time, 'tzset') and self.TIME_ZONE:
# When we can, attempt to validate the timezone. If we can't find
# this file, no check happens and it's harmless.
zoneinfo_root = Path('/usr/share/zoneinfo')
zone_info_file = zoneinfo_root.joinpath(*self.TIME_ZONE.split('/'))
if zoneinfo_root.exists() and not zone_info_file.exists():
raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
# Move the time zone info into os.environ. See ticket #2315 for why
# we don't do this unconditionally (breaks Windows).
os.environ['TZ'] = self.TIME_ZONE
time.tzset()
if self.is_overridden('USE_L10N'):
warnings.warn(USE_L10N_DEPRECATED_MSG, RemovedInDjango50Warning)
def is_overridden(self, setting):
return setting in self._explicit_settings
def __repr__(self):
return '<%(cls)s "%(settings_module)s">' % {
'cls': self.__class__.__name__,
'settings_module': self.SETTINGS_MODULE,
}
class UserSettingsHolder:
"""Holder for user configured settings."""
# SETTINGS_MODULE doesn't make much sense in the manually configured
# (standalone) case.
SETTINGS_MODULE = None
def __init__(self, default_settings):
"""
Requests for configuration variables not in this class are satisfied
from the module specified in default_settings (if possible).
"""
self.__dict__['_deleted'] = set()
self.default_settings = default_settings
def __getattr__(self, name):
if not name.isupper() or name in self._deleted:
raise AttributeError
return getattr(self.default_settings, name)
def __setattr__(self, name, value):
self._deleted.discard(name)
if name == 'USE_L10N':
warnings.warn(USE_L10N_DEPRECATED_MSG, RemovedInDjango50Warning)
super().__setattr__(name, value)
if name == 'USE_DEPRECATED_PYTZ':
warnings.warn(USE_DEPRECATED_PYTZ_DEPRECATED_MSG, RemovedInDjango50Warning)
def __delattr__(self, name):
self._deleted.add(name)
if hasattr(self, name):
super().__delattr__(name)
def __dir__(self):
return sorted(
s for s in [*self.__dict__, *dir(self.default_settings)]
if s not in self._deleted
)
def is_overridden(self, setting):
deleted = (setting in self._deleted)
set_locally = (setting in self.__dict__)
set_on_default = getattr(self.default_settings, 'is_overridden', lambda s: False)(setting)
return deleted or set_locally or set_on_default
def __repr__(self):
return '<%(cls)s>' % {
'cls': self.__class__.__name__,
}
settings = LazySettings()
| {
"content_hash": "26c2e6a6d6c9c8afb46745e427a2c516",
"timestamp": "",
"source": "github",
"line_count": 284,
"max_line_length": 101,
"avg_line_length": 37.686619718309856,
"alnum_prop": 0.619078762963655,
"repo_name": "ar4s/django",
"id": "80f3115d47645707b18a1a294ab68ca51ad66d25",
"size": "10703",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "django/conf/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
} |
def main_fun(args, ctx):
import numpy as np
import tensorflow as tf
import tensorflow_datasets as tfds
from tensorflowonspark import TFNode
tfds.disable_progress_bar()
BUFFER_SIZE = args.buffer_size
BATCH_SIZE = args.batch_size
LEARNING_RATE = args.learning_rate
tf_feed = TFNode.DataFeed(ctx.mgr)
def rdd_generator():
while not tf_feed.should_stop():
batch = tf_feed.next_batch(1)
if len(batch) > 0:
example = batch[0]
image = np.array(example[0]).astype(np.float32) / 255.0
image = np.reshape(image, (28, 28, 1))
label = np.array(example[1]).astype(np.float32)
label = np.reshape(label, (1,))
yield (image, label)
else:
return
def input_fn(mode, input_context=None):
if mode == tf.estimator.ModeKeys.TRAIN:
# Note: Spark is responsible for feeding data via streaming RDD
ds = tf.data.Dataset.from_generator(rdd_generator, (tf.float32, tf.float32), (tf.TensorShape([28, 28, 1]), tf.TensorShape([1])))
return ds.batch(BATCH_SIZE)
else:
raise Exception("I'm evaluating: mode={}, input_context={}".format(mode, input_context))
def scale(image, label):
image = tf.cast(image, tf.float32) / 255.0
return image, label
mnist = tfds.load(name='mnist', with_info=True, as_supervised=True)
ds = mnist['test']
if input_context:
ds = ds.shard(input_context.num_input_pipelines, input_context.input_pipeline_id)
return ds.map(scale).batch(BATCH_SIZE)
def serving_input_receiver_fn():
features = tf.compat.v1.placeholder(dtype=tf.float32, shape=[None, 28, 28, 1], name='features')
receiver_tensors = {'conv2d_input': features}
return tf.estimator.export.ServingInputReceiver(receiver_tensors, receiver_tensors)
def model_fn(features, labels, mode):
model = tf.keras.Sequential([
tf.keras.layers.Conv2D(32, 3, activation='relu', input_shape=(28, 28, 1)),
tf.keras.layers.MaxPooling2D(),
tf.keras.layers.Flatten(),
tf.keras.layers.Dense(64, activation='relu'),
tf.keras.layers.Dense(10, activation='softmax')
])
logits = model(features, training=False)
if mode == tf.estimator.ModeKeys.PREDICT:
predictions = {'logits': logits}
return tf.estimator.EstimatorSpec(mode, predictions=predictions)
optimizer = tf.compat.v1.train.GradientDescentOptimizer(
learning_rate=LEARNING_RATE)
loss = tf.keras.losses.SparseCategoricalCrossentropy(
from_logits=True, reduction=tf.keras.losses.Reduction.NONE)(labels, logits)
loss = tf.reduce_sum(input_tensor=loss) * (1. / BATCH_SIZE)
if mode == tf.estimator.ModeKeys.EVAL:
return tf.estimator.EstimatorSpec(mode, loss=loss)
return tf.estimator.EstimatorSpec(
mode=mode,
loss=loss,
train_op=optimizer.minimize(
loss, tf.compat.v1.train.get_or_create_global_step()))
# Note: the original example used MultiWorkerMirroredStrategy which is a synchronous training strategy.
# Since streaming data arrives irregularly, we must use the asynchronous ParameterServerStrategy
# to allow data to be processed as it arrives and to avoid deadlocks.
# strategy = tf.distribute.experimental.MultiWorkerMirroredStrategy()
strategy = tf.distribute.experimental.ParameterServerStrategy()
config = tf.estimator.RunConfig(train_distribute=strategy, save_checkpoints_steps=100)
classifier = tf.estimator.Estimator(
model_fn=model_fn, model_dir=args.model_dir, config=config)
# exporter = tf.estimator.FinalExporter("serving", serving_input_receiver_fn=serving_input_receiver_fn)
tf.estimator.train_and_evaluate(
classifier,
train_spec=tf.estimator.TrainSpec(input_fn=input_fn),
eval_spec=tf.estimator.EvalSpec(input_fn=input_fn)
# eval_spec=tf.estimator.EvalSpec(input_fn=input_fn, exporters=exporter)
)
if ctx.job_name == 'chief':
print("Exporting saved_model to {}".format(args.export_dir))
classifier.export_saved_model(args.export_dir, serving_input_receiver_fn)
if __name__ == "__main__":
from pyspark.context import SparkContext
from pyspark.conf import SparkConf
from pyspark.streaming import StreamingContext
from tensorflowonspark import TFCluster
import argparse
sc = SparkContext(conf=SparkConf().setAppName("mnist_estimator"))
ssc = StreamingContext(sc, 60) # group data into intervals of one minute
executors = sc._conf.get("spark.executor.instances")
num_executors = int(executors) if executors is not None else 1
parser = argparse.ArgumentParser()
parser.add_argument("--batch_size", help="number of records per batch", type=int, default=64)
parser.add_argument("--buffer_size", help="size of shuffle buffer", type=int, default=10000)
parser.add_argument("--cluster_size", help="number of nodes in the cluster", type=int, default=num_executors)
parser.add_argument("--images_labels", help="path to MNIST images and labels in parallelized format")
parser.add_argument("--learning_rate", help="learning rate", type=float, default=1e-3)
parser.add_argument("--model_dir", help="path to save checkpoint", default="mnist_model")
parser.add_argument("--tensorboard", help="launch tensorboard process", action="store_true")
args = parser.parse_args()
print("args:", args)
# create RDD of input data
def parse(ln):
vec = [int(x) for x in ln.split(',')]
return (vec[1:], vec[0])
stream = ssc.textFileStream(args.images_labels)
images_labels = stream.map(parse)
cluster = TFCluster.run(sc, main_fun, args, args.cluster_size, num_ps=1, tensorboard=args.tensorboard, input_mode=TFCluster.InputMode.SPARK, log_dir=args.model_dir, master_node='chief')
cluster.train(images_labels, feed_timeout=86400) # extend feed timeout to 24hrs for streaming data to arrive
ssc.start()
cluster.shutdown(ssc)
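# A launch sketch (master, executor count and input path are all illustrative):
# spark-submit --master yarn --conf spark.executor.instances=3 \
#     mnist_spark_streaming.py --images_labels hdfs:///mnist/csv/train \
#     --model_dir mnist_model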
| {
"content_hash": "54e2afbfadd089448364be4c05e433a8",
"timestamp": "",
"source": "github",
"line_count": 139,
"max_line_length": 187,
"avg_line_length": 42.39568345323741,
"alnum_prop": 0.7013405735618531,
"repo_name": "yahoo/TensorFlowOnSpark",
"id": "16deb4b68f3161b14403de2e79a5f8dee6dd5382",
"size": "5992",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/mnist/estimator/mnist_spark_streaming.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "208390"
},
{
"name": "Scala",
"bytes": "46003"
},
{
"name": "Shell",
"bytes": "4410"
}
],
"symlink_target": ""
} |
'''
Integration Test Teardown case
@author: Youyk
'''
from zstackwoodpecker import test_util
from zstackwoodpecker import clean_util
import zstacklib.utils.shell as shell
import suite_setup
import zstackwoodpecker.test_lib as test_lib
def test():
clean_util.cleanup_all_vms_violently()
clean_util.cleanup_none_vm_volumes_violently()
clean_util.umount_all_primary_storages_violently()
clean_util.cleanup_backup_storage()
clean_util.delete_pxe()
shell.call('pkill vbmc')
test_lib.setup_plan.stop_node()
test_util.test_pass('Test Teardown Success')
| {
"content_hash": "b5f7cd817a814bfb1ef04a05d20454aa",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 54,
"avg_line_length": 26.454545454545453,
"alnum_prop": 0.7474226804123711,
"repo_name": "zstackio/zstack-woodpecker",
"id": "00cd26088b277ae1adaff7502bc45c0a033c246e",
"size": "582",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "integrationtest/vm/baremetal/suite_teardown.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2356"
},
{
"name": "Go",
"bytes": "49822"
},
{
"name": "Makefile",
"bytes": "687"
},
{
"name": "Puppet",
"bytes": "875"
},
{
"name": "Python",
"bytes": "13070596"
},
{
"name": "Shell",
"bytes": "177861"
}
],
"symlink_target": ""
} |
try:
from django.conf.urls import patterns, url
except ImportError: # deprecated since Django 1.4
from django.conf.urls.defaults import patterns, url
from .api import routes
urlpatterns = patterns('',
url(r'^device/$', routes.device, name='ios-notifications-device-create'),
url(r'^device/(?P<token>\w+)/(?P<service__id>\d+)/$', routes.device, name='ios-notifications-device'),
)
| {
"content_hash": "6a0169da90e99eb26a8aceb4029dadf9",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 106,
"avg_line_length": 36.27272727272727,
"alnum_prop": 0.6992481203007519,
"repo_name": "nautilebleu/django-ios-notifications",
"id": "83bb09049aaefc0aa43024ffc6b480909180701b",
"size": "424",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "ios_notifications/urls.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "1838"
},
{
"name": "Python",
"bytes": "102249"
},
{
"name": "Shell",
"bytes": "105"
}
],
"symlink_target": ""
} |
import time
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.web import Application, RequestHandler
from multiprocessing.pool import ThreadPool
from tornado import gen
# Based on https://gist.github.com/methane/2185380
html_content = """
<!DOCTYPE html>
<html lang="zh-CN">
<head>
<script src="http://apps.bdimg.com/libs/jquery/2.1.4/jquery.min.js"></script>
</head>
<body>
<h1>Task test</h1><br/>
<button id="job">Start</button>
</body>
</html>
<script type="text/javascript">
function job_check(timer,tid) {
$.ajax({
type: "GET",
url: "job_check?tid="+tid,
success: function(msg){
console.log(msg);
if(msg != ""){
alert( "任务结果: " + msg );
clearInterval(timer);//结束轮询
}
}
});
}
jQuery(function($) {
$("#job").click( function () {
$.ajax({
type: "GET",
url: "add_job",
success: function(tid){
alert( "开始任务: " + tid );
timer = setInterval(function(){
console.log("run.");
job_check(timer,tid);
},1000);
}
});
});
})
</script>
"""
class MainPage(RequestHandler):
def get(self):
self.write(html_content)
_workers = ThreadPool(10)
_result = {}
# Background task.
def blocking_task(n, tid):
time.sleep(n)
print(tid)
_result[tid] = {"finish"}
class AddJobHandler(RequestHandler):
@gen.coroutine
def get(self):
tid = str(int(time.time() * 10000))
_workers.apply_async(blocking_task, (10, tid)) # pass the duration argument: 10 seconds.
self.write(tid)
self.finish() # finish the response first; the task keeps running in the background.
class JobCheckHandler(RequestHandler):
def get(self):
tid = self.get_argument("tid")
if tid in _result.keys():
out = _result[tid] # the result
del _result[tid] # remove this tid's entry.
self.write(str(out))
else:
self.write("")
# main entry point.
if __name__ == "__main__":
HTTPServer(Application([
("/", MainPage),
("/add_job", AddJobHandler),
("/job_check", JobCheckHandler)
], debug=True)).listen(9999)
print("start web .")
IOLoop.instance().start()
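# Quick manual test (a sketch; port 9999 as configured above):
# curl http://localhost:9999/add_job                -> returns a tid
# curl "http://localhost:9999/job_check?tid=<tid>"  -> empty until the job is done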
| {
"content_hash": "25bd1e496c26768432960b6e37a73c78",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 83,
"avg_line_length": 23.784313725490197,
"alnum_prop": 0.5222588623248146,
"repo_name": "pythonstock/stock",
"id": "fd79744dc830aee3408d7712630d608fa9f988cc",
"size": "2581",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "web/test_thread.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "482"
},
{
"name": "Dockerfile",
"bytes": "5656"
},
{
"name": "HTML",
"bytes": "28502"
},
{
"name": "JavaScript",
"bytes": "2745"
},
{
"name": "Python",
"bytes": "171750"
},
{
"name": "Shell",
"bytes": "6996"
}
],
"symlink_target": ""
} |
from kivy.properties import StringProperty
from kivy.uix.popup import Popup
from GUI.PopUps.PopUps import ExceptionAlertPopUp
from Utils.FileHandler import *
class FileSavingDialogPopUp(Popup):
"""
A popup functionality to confirm actions
when saving a file that already exists.
"""
destination = StringProperty('')
new_filename = StringProperty('')
original_destination_filename_only = StringProperty('')
folder_name = StringProperty('')
dismiss_button_text = StringProperty('')
def __init__(self, source, destination, filename_list, listener, path,
folder_name, dismiss_button_text):
"""
Constructs a popup
:param source: the source file to copy
:param destination: the destination to copy the source to
:param filename_list: the list of file names at the destination
:param listener: the object notified about the file operation
:param path: the path of the directory the file is saved to
:param folder_name: the name of the destination folder shown in the popup
:param dismiss_button_text: the text shown on the popup's dismiss button
"""
super(FileSavingDialogPopUp, self).__init__()
self.source = source
self.destination_name = destination
self.destination = destination
self.path = path
self.folder_name = folder_name
self.dismiss_button_text = dismiss_button_text
self.media_files = get_filenames_from_path(self.path)
self.new_filename = prefilled_new_file_name(self.destination, self.path)
self.original_destination_filename_only = get_filename_only(destination)
self.ids.save_as.bind(text=self.on_text)
self.filename_list = filename_list
self.listener = listener
def on_text(self, instance, filename):
"""
This function is called every time the bound widget's text-property changes
:param instance: the instance of the Widget
:param filename: the value in the text property
:return:
"""
copy_file_btn = self.ids.copy_file_button
if filename in self.media_files or not check_filename(filename):
copy_file_btn.disabled = True
else:
copy_file_btn.disabled = False
def error(self, exception):
ExceptionAlertPopUp("Error writing file", exception).open() | {
"content_hash": "e40e21168aa1c998ae170ebdb6f24513",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 83,
"avg_line_length": 36.91525423728814,
"alnum_prop": 0.6524334251606979,
"repo_name": "RemuTeam/Remu",
"id": "d886b8c25ad354ac81fc0fe77826a4a8df8671a8",
"size": "2178",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project/GUI/PopUps/FileSavingDialogPopUp.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Gherkin",
"bytes": "221"
},
{
"name": "Python",
"bytes": "218196"
}
],
"symlink_target": ""
} |
from .documents import DocumentHTML
from .home import HomeHTML
from .search import SearchHTML
| {
"content_hash": "40efbe62fcda76db981003f17027099c",
"timestamp": "",
"source": "github",
"line_count": 3,
"max_line_length": 35,
"avg_line_length": 31.333333333333332,
"alnum_prop": 0.8404255319148937,
"repo_name": "macbre/wbc.macbre.net",
"id": "31d4b044f095c100a339132b23ec2f050ebc71b4",
"size": "94",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "app/wbc/views/html/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "6397"
},
{
"name": "Dockerfile",
"bytes": "1039"
},
{
"name": "HTML",
"bytes": "6238"
},
{
"name": "JavaScript",
"bytes": "2908"
},
{
"name": "Makefile",
"bytes": "1448"
},
{
"name": "Python",
"bytes": "43969"
},
{
"name": "Shell",
"bytes": "166"
}
],
"symlink_target": ""
} |
import sys
import eventlet
eventlet.monkey_patch()
import netaddr
from oslo.config import cfg
from neutron.agent.common import config
from neutron.agent import l3_agent
from neutron.agent import l3_ha_agent
from neutron.agent.linux import external_process
from neutron.agent.linux import interface
from neutron.agent.linux import ip_lib
from neutron.common import config as common_config
from neutron.common import constants as l3_constants
from neutron.common import topics
from neutron.openstack.common import log as logging
from neutron.openstack.common import service
from neutron import service as neutron_service
from neutron.services.firewall.agents.l3reference import firewall_l3_agent
from neutron.services.firewall.agents.varmour import varmour_api
from neutron.services.firewall.agents.varmour import varmour_utils as va_utils
LOG = logging.getLogger(__name__)
class vArmourL3NATAgent(l3_agent.L3NATAgent,
firewall_l3_agent.FWaaSL3AgentRpcCallback):
def __init__(self, host, conf=None):
LOG.debug(_('vArmourL3NATAgent: __init__'))
self.rest = varmour_api.vArmourRestAPI()
super(vArmourL3NATAgent, self).__init__(host, conf)
def _destroy_router_namespaces(self, only_router_id=None):
return
def _destroy_router_namespace(self, namespace):
return
def _create_router_namespace(self, ri):
return
def _router_added(self, router_id, router):
LOG.debug(_("_router_added: %s"), router_id)
ri = l3_agent.RouterInfo(router_id, self.root_helper,
self.conf.use_namespaces, router)
self.router_info[router_id] = ri
super(vArmourL3NATAgent, self).process_router_add(ri)
def _router_removed(self, router_id):
LOG.debug(_("_router_removed: %s"), router_id)
ri = self.router_info[router_id]
if ri:
ri.router['gw_port'] = None
ri.router[l3_constants.INTERFACE_KEY] = []
ri.router[l3_constants.FLOATINGIP_KEY] = []
self.process_router(ri)
name = va_utils.get_snat_rule_name(ri)
self.rest.del_cfg_objs(va_utils.REST_URL_CONF_NAT_RULE, name)
name = va_utils.get_dnat_rule_name(ri)
self.rest.del_cfg_objs(va_utils.REST_URL_CONF_NAT_RULE, name)
name = va_utils.get_trusted_zone_name(ri)
self._va_unset_zone_interfaces(name, True)
name = va_utils.get_untrusted_zone_name(ri)
self._va_unset_zone_interfaces(name, True)
del self.router_info[router_id]
def _spawn_metadata_proxy(self, router_id, ns_name):
return
def _destroy_metadata_proxy(self, router_id, ns_name):
return
def _set_subnet_info(self, port):
ips = port['fixed_ips']
if not ips:
raise Exception(_("Router port %s has no IP address") % port['id'])
return
if len(ips) > 1:
LOG.warn(_("Ignoring multiple IPs on router port %s"), port['id'])
prefixlen = netaddr.IPNetwork(port['subnet']['cidr']).prefixlen
port['ip_cidr'] = "%s/%s" % (ips[0]['ip_address'], prefixlen)
def _va_unset_zone_interfaces(self, zone_name, remove_zone=False):
# return True if zone exists; otherwise, return False
LOG.debug(_("_va_unset_zone_interfaces: %s"), zone_name)
resp = self.rest.rest_api('GET', va_utils.REST_URL_CONF_ZONE)
if resp and resp['status'] == 200:
zlist = resp['body']['response']
for zn in zlist:
if zn == zone_name:
commit = False
if 'interface' in zlist[zn]:
for intf in zlist[zn]['interface']:
self.rest.rest_api('DELETE',
va_utils.REST_URL_CONF +
va_utils.REST_ZONE_NAME % zn +
va_utils.REST_INTF_NAME % intf)
commit = True
if remove_zone:
self.rest.rest_api('DELETE',
va_utils.REST_URL_CONF +
va_utils.REST_ZONE_NAME % zn)
commit = True
if commit:
self.rest.commit()
return True
return False
def _va_pif_2_lif(self, pif):
return pif + '.0'
def _va_set_interface_ip(self, pif, cidr):
LOG.debug(_("_va_set_interface_ip: %(pif)s %(cidr)s"),
{'pif': pif, 'cidr': cidr})
lif = self._va_pif_2_lif(pif)
obj = va_utils.REST_INTF_NAME % pif + va_utils.REST_LOGIC_NAME % lif
body = {
'name': lif,
'family': 'ipv4',
'address': cidr
}
self.rest.rest_api('PUT', va_utils.REST_URL_CONF + obj, body)
def _va_get_port_name(self, port_list, name):
if name:
for p in port_list:
if p['VM name'] == name:
return p['name']
def _va_config_trusted_zone(self, ri, plist):
zone = va_utils.get_trusted_zone_name(ri)
LOG.debug(_("_va_config_trusted_zone: %s"), zone)
body = {
'name': zone,
'type': 'L3',
'interface': []
}
if not self._va_unset_zone_interfaces(zone):
# if zone doesn't exist, create it
self.rest.rest_api('POST', va_utils.REST_URL_CONF_ZONE, body)
self.rest.commit()
# add new internal ports to trusted zone
for p in ri.internal_ports:
if p['admin_state_up']:
dev = self.get_internal_device_name(p['id'])
pif = self._va_get_port_name(plist, dev)
if pif:
lif = self._va_pif_2_lif(pif)
if lif not in body['interface']:
body['interface'].append(lif)
self._va_set_interface_ip(pif, p['ip_cidr'])
if body['interface']:
self.rest.rest_api('PUT', va_utils.REST_URL_CONF_ZONE, body)
self.rest.commit()
def _va_config_untrusted_zone(self, ri, plist):
zone = va_utils.get_untrusted_zone_name(ri)
LOG.debug(_("_va_config_untrusted_zone: %s"), zone)
body = {
'name': zone,
'type': 'L3',
'interface': []
}
if not self._va_unset_zone_interfaces(zone):
# if zone doesn't exist, create it
self.rest.rest_api('POST', va_utils.REST_URL_CONF_ZONE, body)
self.rest.commit()
# add new gateway ports to untrusted zone
if ri.ex_gw_port:
LOG.debug(_("_va_config_untrusted_zone: gw=%r"), ri.ex_gw_port)
dev = self.get_external_device_name(ri.ex_gw_port['id'])
pif = self._va_get_port_name(plist, dev)
if pif:
lif = self._va_pif_2_lif(pif)
self._va_set_interface_ip(pif, ri.ex_gw_port['ip_cidr'])
body['interface'].append(lif)
self.rest.rest_api('PUT', va_utils.REST_URL_CONF_ZONE, body)
self.rest.commit()
def _va_config_router_snat_rules(self, ri, plist):
LOG.debug(_('_va_config_router_snat_rules: %s'), ri.router['id'])
prefix = va_utils.get_snat_rule_name(ri)
self.rest.del_cfg_objs(va_utils.REST_URL_CONF_NAT_RULE, prefix)
if not ri.enable_snat:
return
for idx, p in enumerate(ri.internal_ports):
if p['admin_state_up']:
dev = self.get_internal_device_name(p['id'])
pif = self._va_get_port_name(plist, dev)
if pif:
net = netaddr.IPNetwork(p['ip_cidr'])
body = {
'name': '%s_%d' % (prefix, idx),
'ingress-context-type': 'interface',
'ingress-index': self._va_pif_2_lif(pif),
'source-address': [
[str(netaddr.IPAddress(net.first + 2)),
str(netaddr.IPAddress(net.last - 1))]
],
'flag': 'interface translate-source'
}
self.rest.rest_api('POST',
va_utils.REST_URL_CONF_NAT_RULE,
body)
if ri.internal_ports:
self.rest.commit()
def _va_config_floating_ips(self, ri):
LOG.debug(_('_va_config_floating_ips: %s'), ri.router['id'])
prefix = va_utils.get_dnat_rule_name(ri)
self.rest.del_cfg_objs(va_utils.REST_URL_CONF_NAT_RULE, prefix)
# add new dnat rules
for idx, fip in enumerate(ri.floating_ips):
body = {
'name': '%s_%d' % (prefix, idx),
'ingress-context-type': 'zone',
'ingress-index': va_utils.get_untrusted_zone_name(ri),
'destination-address': [[fip['floating_ip_address'],
fip['floating_ip_address']]],
'static': [fip['fixed_ip_address'], fip['fixed_ip_address']],
'flag': 'translate-destination'
}
self.rest.rest_api('POST', va_utils.REST_URL_CONF_NAT_RULE, body)
if ri.floating_ips:
self.rest.commit()
def process_router(self, ri):
LOG.debug(_("process_router: %s"), ri.router['id'])
super(vArmourL3NATAgent, self).process_router(ri)
self.rest.auth()
# read internal port name and configuration port name map
resp = self.rest.rest_api('GET', va_utils.REST_URL_INTF_MAP)
if resp and resp['status'] == 200:
try:
plist = resp['body']['response']
except ValueError:
LOG.warn(_("Unable to parse interface mapping."))
return
else:
LOG.warn(_("Unable to read interface mapping."))
return
if ri.ex_gw_port:
self._set_subnet_info(ri.ex_gw_port)
self._va_config_trusted_zone(ri, plist)
self._va_config_untrusted_zone(ri, plist)
self._va_config_router_snat_rules(ri, plist)
self._va_config_floating_ips(ri)
def _handle_router_snat_rules(self, ri, ex_gw_port, internal_cidrs,
interface_name, action):
return
def _send_gratuitous_arp_packet(self, ri, interface_name, ip_address):
return
def external_gateway_added(self, ri, ex_gw_port,
interface_name, internal_cidrs):
LOG.debug(_("external_gateway_added: %s"), ri.router['id'])
if not ip_lib.device_exists(interface_name,
root_helper=self.root_helper,
namespace=ri.ns_name):
self.driver.plug(ex_gw_port['network_id'],
ex_gw_port['id'], interface_name,
ex_gw_port['mac_address'],
bridge=self.conf.external_network_bridge,
namespace=ri.ns_name,
prefix=l3_agent.EXTERNAL_DEV_PREFIX)
self.driver.init_l3(interface_name, [ex_gw_port['ip_cidr']],
namespace=ri.ns_name)
def _update_routing_table(self, ri, operation, route):
return
class vArmourL3NATAgentWithStateReport(vArmourL3NATAgent,
l3_agent.L3NATAgentWithStateReport):
pass
def main():
conf = cfg.CONF
conf.register_opts(vArmourL3NATAgent.OPTS)
conf.register_opts(l3_ha_agent.OPTS)
config.register_interface_driver_opts_helper(conf)
config.register_use_namespaces_opts_helper(conf)
config.register_agent_state_opts_helper(conf)
config.register_root_helper(conf)
conf.register_opts(interface.OPTS)
conf.register_opts(external_process.OPTS)
common_config.init(sys.argv[1:])
config.setup_logging()
server = neutron_service.Service.create(
binary='neutron-l3-agent',
topic=topics.L3_AGENT,
report_interval=cfg.CONF.AGENT.report_interval,
manager='neutron.services.firewall.agents.varmour.varmour_router.'
'vArmourL3NATAgentWithStateReport')
service.launch(server).wait()
| {
"content_hash": "fccff6cf5f8a9498661017d96b58aa72",
"timestamp": "",
"source": "github",
"line_count": 333,
"max_line_length": 79,
"avg_line_length": 37.732732732732735,
"alnum_prop": 0.5393553521687227,
"repo_name": "nash-x/hws",
"id": "7ac398d18a3bbac880b53d8413c7818b54d31364",
"size": "13202",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "neutron/services/firewall/agents/varmour/varmour_router.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1043"
},
{
"name": "PLpgSQL",
"bytes": "12782"
},
{
"name": "Python",
"bytes": "20443623"
},
{
"name": "Shell",
"bytes": "4643"
}
],
"symlink_target": ""
} |
"""Support for SSL in PyMongo."""
import atexit
import sys
import threading
from bson.py3compat import string_type
from pymongo.errors import ConfigurationError
HAVE_SSL = True
try:
import pymongo.pyopenssl_context as _ssl
except ImportError:
try:
import pymongo.ssl_context as _ssl
except ImportError:
HAVE_SSL = False
HAVE_CERTIFI = False
try:
import certifi
HAVE_CERTIFI = True
except ImportError:
pass
HAVE_WINCERTSTORE = False
try:
from wincertstore import CertFile
HAVE_WINCERTSTORE = True
except ImportError:
pass
_WINCERTSLOCK = threading.Lock()
_WINCERTS = None
if HAVE_SSL:
# Note: The validate* functions below deal with users passing
# CPython ssl module constants to configure certificate verification
# at a high level. This is legacy behavior, but requires us to
# import the ssl module even if we're only using it for this purpose.
import ssl as _stdlibssl
from ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
HAS_SNI = _ssl.HAS_SNI
IPADDR_SAFE = _ssl.IS_PYOPENSSL or sys.version_info[:2] >= (3, 7)
SSLError = _ssl.SSLError
def validate_cert_reqs(option, value):
"""Validate the cert reqs are valid. It must be None or one of the
three values ``ssl.CERT_NONE``, ``ssl.CERT_OPTIONAL`` or
``ssl.CERT_REQUIRED``.
"""
if value is None:
return value
if isinstance(value, string_type) and hasattr(_stdlibssl, value):
value = getattr(_stdlibssl, value)
if value in (CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED):
return value
raise ValueError("The value of %s must be one of: "
"`ssl.CERT_NONE`, `ssl.CERT_OPTIONAL` or "
"`ssl.CERT_REQUIRED`" % (option,))
def validate_allow_invalid_certs(option, value):
"""Validate the option to allow invalid certificates is valid."""
# Avoid circular import.
from pymongo.common import validate_boolean_or_string
boolean_cert_reqs = validate_boolean_or_string(option, value)
if boolean_cert_reqs:
return CERT_NONE
return CERT_REQUIRED
def _load_wincerts():
"""Set _WINCERTS to an instance of wincertstore.Certfile."""
global _WINCERTS
certfile = CertFile()
certfile.addstore("CA")
certfile.addstore("ROOT")
atexit.register(certfile.close)
_WINCERTS = certfile
def get_ssl_context(*args):
"""Create and return an SSLContext object."""
(certfile,
keyfile,
passphrase,
ca_certs,
cert_reqs,
crlfile,
match_hostname,
check_ocsp_endpoint) = args
verify_mode = CERT_REQUIRED if cert_reqs is None else cert_reqs
ctx = _ssl.SSLContext(_ssl.PROTOCOL_SSLv23)
# SSLContext.check_hostname was added in CPython 2.7.9 and 3.4.
if hasattr(ctx, "check_hostname"):
if _ssl.CHECK_HOSTNAME_SAFE and verify_mode != CERT_NONE:
ctx.check_hostname = match_hostname
else:
ctx.check_hostname = False
if hasattr(ctx, "check_ocsp_endpoint"):
ctx.check_ocsp_endpoint = check_ocsp_endpoint
if hasattr(ctx, "options"):
# Explicitly disable SSLv2, SSLv3 and TLS compression. Note that
# up to date versions of MongoDB 2.4 and above already disable
# SSLv2 and SSLv3, python disables SSLv2 by default in >= 2.7.7
# and >= 3.3.4 and SSLv3 in >= 3.4.3.
ctx.options |= _ssl.OP_NO_SSLv2
ctx.options |= _ssl.OP_NO_SSLv3
ctx.options |= _ssl.OP_NO_COMPRESSION
ctx.options |= _ssl.OP_NO_RENEGOTIATION
if certfile is not None:
try:
ctx.load_cert_chain(certfile, keyfile, passphrase)
except _ssl.SSLError as exc:
raise ConfigurationError(
"Private key doesn't match certificate: %s" % (exc,))
if crlfile is not None:
if _ssl.IS_PYOPENSSL:
raise ConfigurationError(
"ssl_crlfile cannot be used with PyOpenSSL")
if not hasattr(ctx, "verify_flags"):
raise ConfigurationError(
"Support for ssl_crlfile requires "
"python 2.7.9+ (pypy 2.5.1+) or 3.4+")
# Match the server's behavior.
ctx.verify_flags = getattr(_ssl, "VERIFY_CRL_CHECK_LEAF", 0)
ctx.load_verify_locations(crlfile)
if ca_certs is not None:
ctx.load_verify_locations(ca_certs)
elif cert_reqs != CERT_NONE:
# CPython >= 2.7.9 or >= 3.4.0, pypy >= 2.5.1
if hasattr(ctx, "load_default_certs"):
ctx.load_default_certs()
# Python >= 3.2.0, useless on Windows.
elif (sys.platform != "win32" and
hasattr(ctx, "set_default_verify_paths")):
ctx.set_default_verify_paths()
elif sys.platform == "win32" and HAVE_WINCERTSTORE:
with _WINCERTSLOCK:
if _WINCERTS is None:
_load_wincerts()
ctx.load_verify_locations(_WINCERTS.name)
elif HAVE_CERTIFI:
ctx.load_verify_locations(certifi.where())
else:
raise ConfigurationError(
"`ssl_cert_reqs` is not ssl.CERT_NONE and no system "
"CA certificates could be loaded. `ssl_ca_certs` is "
"required.")
ctx.verify_mode = verify_mode
return ctx
else:
class SSLError(Exception):
pass
HAS_SNI = False
IPADDR_SAFE = False
def validate_cert_reqs(option, dummy):
"""No ssl module, raise ConfigurationError."""
raise ConfigurationError("The value of %s is set but can't be "
"validated. The ssl module is not available"
% (option,))
def validate_allow_invalid_certs(option, dummy):
"""No ssl module, raise ConfigurationError."""
return validate_cert_reqs(option, dummy)
def get_ssl_context(*dummy):
"""No ssl module, raise ConfigurationError."""
raise ConfigurationError("The ssl module is not available.")
| {
"content_hash": "b076f6b99aadf9c2cadfc6e20f25fb3f",
"timestamp": "",
"source": "github",
"line_count": 170,
"max_line_length": 77,
"avg_line_length": 37.65882352941176,
"alnum_prop": 0.5852858481724461,
"repo_name": "ASMlover/study",
"id": "b5847244c3715f1684d0f249fcae218e28df4a39",
"size": "6986",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/pyluna/libs/pymongo/ssl_support.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Assembly",
"bytes": "3055440"
},
{
"name": "Batchfile",
"bytes": "4662"
},
{
"name": "Brainfuck",
"bytes": "571"
},
{
"name": "C",
"bytes": "13569580"
},
{
"name": "C#",
"bytes": "3959"
},
{
"name": "C++",
"bytes": "14741264"
},
{
"name": "CMake",
"bytes": "543917"
},
{
"name": "CSS",
"bytes": "11505"
},
{
"name": "Common Lisp",
"bytes": "114"
},
{
"name": "Emacs Lisp",
"bytes": "6042"
},
{
"name": "Go",
"bytes": "105203"
},
{
"name": "Groovy",
"bytes": "2907"
},
{
"name": "HTML",
"bytes": "911945"
},
{
"name": "Lex",
"bytes": "9370"
},
{
"name": "Lua",
"bytes": "32829"
},
{
"name": "Makefile",
"bytes": "1000611"
},
{
"name": "NASL",
"bytes": "3609"
},
{
"name": "NewLisp",
"bytes": "5805"
},
{
"name": "Perl",
"bytes": "594"
},
{
"name": "Python",
"bytes": "2752752"
},
{
"name": "SWIG",
"bytes": "91"
},
{
"name": "Shell",
"bytes": "9993"
},
{
"name": "Vim script",
"bytes": "92204"
},
{
"name": "Yacc",
"bytes": "6278"
}
],
"symlink_target": ""
} |
"""Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data'))
>>> os.chdir(datadir)
"""
from nipype.interfaces.base import (
TraitedSpec, BaseInterface, File)
from nipype.utils.filemanip import split_filename
import os.path as op
import nibabel as nb
import numpy as np
from nipype.utils.misc import package_check
import warnings
from ... import logging
iflogger = logging.getLogger('interface')
have_dipy = True
try:
package_check('dipy', version='0.6.0')
except Exception, e:
have_dipy = False
else:
import dipy.reconst.dti as dti
from dipy.core.gradients import gradient_table
from dipy.io.utils import nifti1_symmat
def tensor_fitting(in_file, bvals, bvecs, mask_file=None):
"""
Use dipy to fit DTI
Parameters
----------
in_file : str
Full path to a DWI data file.
bvals : str
Full path to a file containing gradient magnitude information (b-values).
bvecs : str
Full path to a file containing gradient direction information (b-vectors).
mask_file : str, optional
Full path to a file containing a binary mask. Defaults to use the entire volume.
Returns
-------
TensorFit object, affine
"""
img = nb.load(in_file)
data = img.get_data()
affine = img.get_affine()
if mask_file is not None:
mask = nb.load(mask_file).get_data()
else:
mask = None
# Load information about the gradients:
gtab = gradient_table(bvals, bvecs)
# Fit it
tenmodel = dti.TensorModel(gtab)
return tenmodel.fit(data, mask), affine
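# Example (a sketch; the file names mirror the doctest inputs used below):
# fit, affine = tensor_fitting('diffusion.nii', 'bvals', 'bvecs')
# fa = fit.fa  # e.g. the fractional anisotropy map derived from the fit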
class DTIInputSpec(TraitedSpec):
in_file = File(exists=True, mandatory=True,
desc='The input 4D diffusion-weighted image file')
bvecs = File(exists=True, mandatory=True,
desc='The input b-vector text file')
bvals = File(exists=True, mandatory=True,
desc='The input b-value text file')
mask_file = File(exists=True, mandatory=False,
desc='An optional white matter mask')
out_filename = File(
genfile=True, desc='The output filename for the DTI parameters image')
class DTIOutputSpec(TraitedSpec):
out_file = File(exists=True)
class DTI(BaseInterface):
"""
Calculates the diffusion tensor model parameters
Example
-------
>>> import nipype.interfaces.dipy as dipy
>>> dti = dipy.DTI()
>>> dti.inputs.in_file = 'diffusion.nii'
>>> dti.inputs.bvecs = 'bvecs'
>>> dti.inputs.bvals = 'bvals'
>>> dti.run() # doctest: +SKIP
"""
input_spec = DTIInputSpec
output_spec = DTIOutputSpec
def _run_interface(self, runtime):
ten_fit, affine = tensor_fitting(self.inputs.in_file,
self.inputs.bvals,
self.inputs.bvecs,
self.inputs.mask_file)
lower_triangular = ten_fit.lower_triangular()
img = nifti1_symmat(lower_triangular, affine)
out_file = op.abspath(self._gen_outfilename())
nb.save(img, out_file)
iflogger.info('DTI parameters image saved as {i}'.format(i=out_file))
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
outputs['out_file'] = op.abspath(self._gen_outfilename())
return outputs
def _gen_filename(self, name):
if name is 'out_filename':
return self._gen_outfilename()
else:
return None
def _gen_outfilename(self):
_, name, _ = split_filename(self.inputs.in_file)
return name + '_dti.nii'
class TensorModeInputSpec(TraitedSpec):
in_file = File(exists=True, mandatory=True,
desc='The input 4D diffusion-weighted image file')
bvecs = File(exists=True, mandatory=True,
desc='The input b-vector text file')
bvals = File(exists=True, mandatory=True,
desc='The input b-value text file')
mask_file = File(exists=True, mandatory=False,
desc='An optional white matter mask')
out_filename = File(
genfile=True, desc='The output filename for the Tensor mode image')
class TensorModeOutputSpec(TraitedSpec):
out_file = File(exists=True)
class TensorMode(BaseInterface):
"""
Creates a map of the mode of the diffusion tensors given a set of
diffusion-weighted images, as well as their associated b-values and
b-vectors. Fits the diffusion tensors and calculates tensor mode
with Dipy.
.. [1] Daniel B. Ennis and G. Kindlmann, "Orthogonal Tensor
Invariants and the Analysis of Diffusion Tensor Magnetic Resonance
Images", Magnetic Resonance in Medicine, vol. 55, no. 1, pp. 136-146,
2006.
Example
-------
>>> import nipype.interfaces.dipy as dipy
>>> mode = dipy.TensorMode()
>>> mode.inputs.in_file = 'diffusion.nii'
>>> mode.inputs.bvecs = 'bvecs'
>>> mode.inputs.bvals = 'bvals'
>>> mode.run() # doctest: +SKIP
"""
input_spec = TensorModeInputSpec
output_spec = TensorModeOutputSpec
def _run_interface(self, runtime):
ten_fit, affine = tensor_fitting(self.inputs.in_file, self.inputs.bvals, self.inputs.bvecs,
self.inputs.mask_file)
## Write as a 3D Nifti image with the original affine
img = nb.Nifti1Image(ten_fit.mode, affine)
out_file = op.abspath(self._gen_outfilename())
nb.save(img, out_file)
iflogger.info('Tensor mode image saved as {i}'.format(i=out_file))
return runtime
def _list_outputs(self):
outputs = self._outputs().get()
outputs['out_file'] = op.abspath(self._gen_outfilename())
return outputs
def _gen_filename(self, name):
        if name == 'out_filename':
return self._gen_outfilename()
else:
return None
def _gen_outfilename(self):
_, name, _ = split_filename(self.inputs.in_file)
return name + '_mode.nii'
| {
"content_hash": "2e6cd1db6e05e705693aad46aa166311",
"timestamp": "",
"source": "github",
"line_count": 194,
"max_line_length": 91,
"avg_line_length": 32.5360824742268,
"alnum_prop": 0.6151774397972116,
"repo_name": "gerddie/nipype",
"id": "ab4e7be3aa347f87cbe2c5413aaedf55a53678dc",
"size": "6336",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nipype/interfaces/dipy/tensors.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "9823"
},
{
"name": "KiCad",
"bytes": "3797"
},
{
"name": "Makefile",
"bytes": "1854"
},
{
"name": "Matlab",
"bytes": "1999"
},
{
"name": "Python",
"bytes": "4610350"
},
{
"name": "Shell",
"bytes": "380"
},
{
"name": "Tcl",
"bytes": "43408"
}
],
"symlink_target": ""
} |
"""
Serve command for Pecan.
"""
from __future__ import print_function
import logging
import os
import sys
import threading
import time
import subprocess
from wsgiref.simple_server import WSGIRequestHandler
from pecan.commands import BaseCommand
from pecan import util
logger = logging.getLogger(__name__)
class ServeCommand(BaseCommand):
"""
Serves a Pecan web application.
This command serves a Pecan web application using the provided
configuration file for the server and application.
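    Example (invocation shown is illustrative)::
        $ pecan serve config.py
        $ pecan serve --reload config.py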
"""
arguments = BaseCommand.arguments + ({
'name': '--reload',
'help': 'Watch for changes and automatically reload.',
'default': False,
'action': 'store_true'
},)
def run(self, args):
super(ServeCommand, self).run(args)
app = self.load_app()
self.serve(app, app.config)
def create_subprocess(self):
self.server_process = subprocess.Popen(
[arg for arg in sys.argv if arg != '--reload'],
stdout=sys.stdout, stderr=sys.stderr
)
def watch_and_spawn(self, conf):
from watchdog.observers import Observer
from watchdog.events import (
FileSystemEventHandler, FileSystemMovedEvent, FileModifiedEvent,
DirModifiedEvent
)
print('Monitoring for changes...')
self.create_subprocess()
parent = self
class AggressiveEventHandler(FileSystemEventHandler):
lock = threading.Lock()
def should_reload(self, event):
for t in (
FileSystemMovedEvent, FileModifiedEvent, DirModifiedEvent
):
if isinstance(event, t):
return True
return False
def on_modified(self, event):
if self.should_reload(event) and self.lock.acquire(False):
parent.server_process.kill()
parent.create_subprocess()
time.sleep(1)
self.lock.release()
# Determine a list of file paths to monitor
paths = self.paths_to_monitor(conf)
event_handler = AggressiveEventHandler()
for path, recurse in paths:
observer = Observer()
observer.schedule(
event_handler,
path=path,
recursive=recurse
)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
def paths_to_monitor(self, conf):
paths = []
for package_name in getattr(conf.app, 'modules', []):
module = __import__(package_name, fromlist=['app'])
if hasattr(module, 'app') and hasattr(module.app, 'setup_app'):
paths.append((
os.path.dirname(module.__file__),
True
))
break
paths.append((os.path.dirname(conf.__file__), False))
return paths
def _serve(self, app, conf):
from wsgiref.simple_server import make_server
host, port = conf.server.host, int(conf.server.port)
srv = make_server(
host,
port,
app,
handler_class=PecanWSGIRequestHandler,
)
print('Starting server in PID %s' % os.getpid())
if host == '0.0.0.0':
print(
'serving on 0.0.0.0:%s, view at http://127.0.0.1:%s' %
(port, port)
)
else:
print("serving on http://%s:%s" % (host, port))
try:
srv.serve_forever()
except KeyboardInterrupt:
# allow CTRL+C to shutdown
pass
def serve(self, app, conf):
"""
A very simple approach for a WSGI server.
"""
if self.args.reload:
try:
self.watch_and_spawn(conf)
except ImportError:
print('The `--reload` option requires `watchdog` to be '
'installed.')
print(' $ pip install watchdog')
else:
self._serve(app, conf)
def gunicorn_run():
"""
The ``gunicorn_pecan`` command for launching ``pecan`` applications
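    Example (illustrative)::
        $ gunicorn_pecan config.py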
"""
try:
from gunicorn.app.wsgiapp import WSGIApplication
except ImportError as exc:
args = exc.args
arg0 = args[0] if args else ''
arg0 += ' (are you sure `gunicorn` is installed?)'
exc.args = (arg0,) + args[1:]
raise
class PecanApplication(WSGIApplication):
def init(self, parser, opts, args):
if len(args) != 1:
parser.error("No configuration file was specified.")
self.cfgfname = os.path.normpath(
os.path.join(os.getcwd(), args[0])
)
self.cfgfname = os.path.abspath(self.cfgfname)
if not os.path.exists(self.cfgfname):
parser.error("Config file not found: %s" % self.cfgfname)
from pecan.configuration import _runtime_conf, set_config
set_config(self.cfgfname, overwrite=True)
# If available, use the host and port from the pecan config file
cfg = {}
if _runtime_conf.get('server'):
server = _runtime_conf['server']
if hasattr(server, 'host') and hasattr(server, 'port'):
cfg['bind'] = '%s:%s' % (
server.host, server.port
)
return cfg
def load(self):
from pecan.deploy import deploy
return deploy(self.cfgfname)
PecanApplication("%(prog)s [OPTIONS] config.py").run()
class PecanWSGIRequestHandler(WSGIRequestHandler, object):
"""
A wsgiref request handler class that allows actual log output depending on
the application configuration.
"""
def __init__(self, *args, **kwargs):
# We set self.path to avoid crashes in log_message() on unsupported
# requests (like "OPTIONS").
self.path = ''
super(PecanWSGIRequestHandler, self).__init__(*args, **kwargs)
def log_message(self, format, *args):
"""
overrides the ``log_message`` method from the wsgiref server so that
normal logging works with whatever configuration the application has
been set to.
Levels are inferred from the HTTP status code, 4XX codes are treated as
warnings, 5XX as errors and everything else as INFO level.
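        For example, a response with status '404' is logged via
        ``logger.warning``, '500' via ``logger.error``, and '200' via
        ``logger.info``.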
"""
code = args[1][0]
levels = {
'4': 'warning',
'5': 'error'
}
log_handler = getattr(logger, levels.get(code, 'info'))
log_handler(format % args)
| {
"content_hash": "ead40552b021b6ff97fc81ac158df8b8",
"timestamp": "",
"source": "github",
"line_count": 229,
"max_line_length": 79,
"avg_line_length": 29.812227074235807,
"alnum_prop": 0.5472389043503735,
"repo_name": "ryanpetrello/pecan",
"id": "6f19ed46a5d9c97c2061c63c8bc2a7995785d1c0",
"size": "6827",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "pecan/commands/serve.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "569"
},
{
"name": "HTML",
"bytes": "4281"
},
{
"name": "Python",
"bytes": "458451"
}
],
"symlink_target": ""
} |
from numpy import *
from plotBoundary import *
# import your SVM training code
# parameters
name = 'ls'
print '======Training======'
# load data from csv files
train = loadtxt('data/data_'+name+'_train.csv')
# use deep copy here to make cvxopt happy
X = train[:, 0:2].copy()
Y = train[:, 2:3].copy()
# Carry out training, primal and/or dual
### TODO ###
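# A minimal hard-margin dual sketch using cvxopt (one possible answer to the
# TODO above; the quadratic program below and the 1e-5 support-vector cutoff
# are illustrative choices, not part of the original skeleton):
from cvxopt import matrix, solvers
n = X.shape[0]
K = dot(X, X.T) * dot(Y, Y.T)  # P_ij = y_i * y_j * <x_i, x_j>
sol = solvers.qp(matrix(K), matrix(-ones((n, 1))),
                 matrix(-eye(n)), matrix(zeros((n, 1))),
                 matrix(Y.T), matrix(0.0))
alphas = array(sol['x'])
w = sum(alphas * Y * X, axis=0)            # recover the primal weights
sv = (alphas > 1e-5).flatten()             # support-vector mask
b = mean(Y[sv].flatten() - dot(X[sv], w))  # bias averaged over support vectors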
# Define the predictSVM(x) function, which uses trained parameters
### TODO ###
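# Hypothetical decision function built from the dual sketch above (uses w, b):
def predictSVM(x):
    return dot(x, w) + b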
# plot training results
plotDecisionBoundary(X, Y, predictSVM, [-1, 0, 1], title = 'SVM Train')
print '======Validation======'
# load data from csv files
validate = loadtxt('data/data_'+name+'_validate.csv')
X = validate[:, 0:2]
Y = validate[:, 2:3]
# plot validation results
plotDecisionBoundary(X, Y, predictSVM, [-1, 0, 1], title = 'SVM Validate')
| {
"content_hash": "96f4b164c6a8d5e2990b8249fa634adb",
"timestamp": "",
"source": "github",
"line_count": 30,
"max_line_length": 74,
"avg_line_length": 26.366666666666667,
"alnum_prop": 0.6649810366624526,
"repo_name": "lycarter/6.867-proj2",
"id": "9645e29bbc688c29b366277add522b2d2cf15a69",
"size": "791",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hw2_resources/svm_test.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Matlab",
"bytes": "17248"
},
{
"name": "Python",
"bytes": "2558"
},
{
"name": "TeX",
"bytes": "18066"
}
],
"symlink_target": ""
} |
import json
from webtelemetry import settings
class LoggerBase:
"""
Base class for all loggers
"""
class LoggerException(Exception):
pass
def write(self, msg):
raise NotImplementedError()
class ConsoleLogger(LoggerBase):
"""
Outputs the Telemetry Event to stdout
"""
def write(self, msg):
        print msg
class JsonLogger(LoggerBase):
"""
Outputs the Telemetry Event to a JSON file
"""
def __init__(self, logger_name):
self._json_file = settings.LOGGERS[logger_name]['config']['filename']
def write(self, msg):
# open the file in append mode
with open(self._json_file, 'a') as json_file:
            # write one JSON document per line so the appended log stays parseable
            json_file.write(json.dumps(msg) + '\n')
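# Minimal shape of the settings consumed above (key name and path are
# hypothetical):
#
#   LOGGERS = {
#       'my_json_logger': {
#           'config': {'filename': '/var/log/telemetry.json'},
#       },
#   }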
| {
"content_hash": "143f2ef6bd3d9d7324154440cd8f8244",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 77,
"avg_line_length": 21.62162162162162,
"alnum_prop": 0.61125,
"repo_name": "ResearchComputing/web-telemetry",
"id": "12e791b86147c990b643cbe7427e946e5bf76563",
"size": "800",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "webtelemetry/loggers.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "9376"
},
{
"name": "Python",
"bytes": "12052"
}
],
"symlink_target": ""
} |
import random
import unittest
from hearthbreaker.agents.basic_agents import DoNothingAgent
from hearthbreaker.cards.base import SpellCard
from hearthbreaker.engine import Game
from hearthbreaker.tags.card_source import CollectionSource
from hearthbreaker.tags.condition import HasCardName
from tests.agents.testing_agents import SelfSpellTestingAgent, EnemySpellTestingAgent, OneCardPlayingAgent, \
EnemyMinionSpellTestingAgent, CardTestingAgent, PlayAndAttackAgent
from hearthbreaker.constants import CHARACTER_CLASS, MINION_TYPE, CARD_RARITY
from hearthbreaker.replay import playback, Replay
from tests.testing_utils import generate_game_for, StackedDeck, mock
from hearthbreaker.cards import *
class TestDruid(unittest.TestCase):
def setUp(self):
random.seed(1857)
def test_Innervate(self):
game = generate_game_for(Innervate, StonetuskBoar, SelfSpellTestingAgent, DoNothingAgent)
# triggers all four innervate cards the player is holding.
game.play_single_turn()
self.assertEqual(9, game.current_player.mana)
for turn in range(0, 16):
game.play_single_turn()
# The mana should not go over 10 on turn 9 (or any other turn)
self.assertEqual(10, game.current_player.mana)
def test_Claw(self):
testing_env = self
class ClawAgent(EnemySpellTestingAgent):
def do_turn(self, player):
super().do_turn(player)
testing_env.assertEqual(2, game.current_player.hero.calculate_attack())
testing_env.assertEqual(2, game.current_player.hero.armor)
game = generate_game_for(Claw, StonetuskBoar, ClawAgent, OneCardPlayingAgent)
game.pre_game()
game.play_single_turn()
def test_Naturalize(self):
game = generate_game_for(StonetuskBoar, Naturalize, OneCardPlayingAgent, EnemyMinionSpellTestingAgent)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
self.assertEqual(5, len(game.other_player.hand))
def test_Savagery(self):
class SavageryAgent(EnemyMinionSpellTestingAgent):
def do_turn(self, player):
if player.mana > 2:
player.hero.power.use()
super().do_turn(player)
game = generate_game_for(Savagery, BloodfenRaptor, SavageryAgent, OneCardPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.other_player.minions))
self.assertEqual(1, game.other_player.minions[0].health)
def test_ClawAndSavagery(self):
game = generate_game_for(BloodfenRaptor, [Claw, Claw, Savagery], OneCardPlayingAgent,
EnemyMinionSpellTestingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
def test_MarkOfTheWild(self):
game = generate_game_for(MarkOfTheWild, StonetuskBoar, EnemyMinionSpellTestingAgent, OneCardPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, game.other_player.minions[0].calculate_attack())
self.assertEqual(3, game.other_player.minions[0].health)
self.assertEqual(3, game.other_player.minions[0].calculate_max_health())
# Test that this spell is being silenced properly as well
game.other_player.minions[0].silence()
self.assertEqual(1, game.other_player.minions[0].calculate_attack())
self.assertEqual(1, game.other_player.minions[0].health)
self.assertEqual(1, game.other_player.minions[0].calculate_max_health())
def test_PowerOfTheWild(self):
deck1 = StackedDeck([StonetuskBoar(), StonetuskBoar(), PowerOfTheWild()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.MAGE)
# This is a test of the +1/+1 option of the Power Of the Wild Card
game = Game([deck1, deck2], [OneCardPlayingAgent(), OneCardPlayingAgent()])
game.current_player = game.players[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, game.current_player.minions[0].calculate_attack())
self.assertEqual(2, game.current_player.minions[0].health)
self.assertEqual(2, game.current_player.minions[0].calculate_max_health())
self.assertEqual(2, game.current_player.minions[1].calculate_attack())
self.assertEqual(2, game.current_player.minions[1].calculate_max_health())
# This is a test of the "Summon Panther" option of the Power of the Wild Card
agent = OneCardPlayingAgent()
agent.choose_option = lambda options, player: options[1]
deck1 = StackedDeck([StonetuskBoar(), StonetuskBoar(), PowerOfTheWild()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.MAGE)
game = Game([deck1, deck2], [agent, OneCardPlayingAgent()])
game.current_player = game.players[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual("Panther", game.current_player.minions[2].card.__class__.__name__)
self.assertEqual(3, game.current_player.minions[2].calculate_attack())
self.assertEqual(2, game.current_player.minions[2].calculate_max_health())
def test_WildGrowth(self):
game = generate_game_for(WildGrowth, StonetuskBoar, SelfSpellTestingAgent, DoNothingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, game.current_player.max_mana)
# Make sure that the case where the player is at 10 mana works as well.
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(10, game.other_player.max_mana)
card_draw_mock = mock.Mock(side_effect=game.other_player.draw)
game.other_player.draw = card_draw_mock
game.play_single_turn()
# Each time the player draws, they will draw another wild growth, which will turn into excess mana,
# which will draw another card. However, because of the ordering of the cards, the last excess mana
# will be after a wild growth, which prevents SpellTestingAgent from playing the card, so only
# 5 draws are made instead of the possible 6
self.assertEqual(5, card_draw_mock.call_count)
def test_Wrath(self):
game = generate_game_for(Wrath, StonetuskBoar, EnemyMinionSpellTestingAgent, OneCardPlayingAgent)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
self.assertEqual(5, len(game.current_player.hand))
random.seed(1857)
game = generate_game_for(Wrath, MogushanWarden, EnemyMinionSpellTestingAgent, OneCardPlayingAgent)
game.players[0].agent.choose_option = lambda options, player: options[1]
for turn in range(0, 8):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(1, len(game.other_player.minions))
# Two wraths will have been played
self.assertEqual(1, game.other_player.minions[0].health)
def test_HealingTouch(self):
game = generate_game_for(HealingTouch, StonetuskBoar, SelfSpellTestingAgent, DoNothingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.other_player.hero.health = 20
game.play_single_turn()
self.assertEqual(28, game.current_player.hero.health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(30, game.current_player.hero.health)
def test_MarkOfNature(self):
deck1 = StackedDeck([StonetuskBoar(), StonetuskBoar(), MarkOfNature()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.MAGE)
game = Game([deck1, deck2], [OneCardPlayingAgent(), OneCardPlayingAgent()])
        game.current_player = game.players[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(5, game.other_player.minions[0].calculate_attack())
deck1 = StackedDeck([StonetuskBoar(), StonetuskBoar(), MarkOfNature()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.MAGE)
agent = OneCardPlayingAgent()
agent.choose_option = lambda options, player: options[1]
game = Game([deck1, deck2], [agent, OneCardPlayingAgent()])
        game.current_player = game.players[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(5, game.other_player.minions[0].calculate_max_health())
self.assertEqual(5, game.other_player.minions[0].health)
self.assertTrue(game.other_player.minions[0].taunt)
def test_SavageRoar(self):
deck1 = StackedDeck([StonetuskBoar(), StonetuskBoar(), SavageRoar()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.MAGE)
game = Game([deck1, deck2], [OneCardPlayingAgent(), OneCardPlayingAgent()])
        game.current_player = game.players[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
minion_increase_mock = mock.Mock()
game.other_player.minions[0].bind("attack_changed", minion_increase_mock)
game.other_player.minions[1].bind("attack_changed", minion_increase_mock)
player_increase_mock = mock.Mock()
game.other_player.hero.bind("attack_changed", player_increase_mock)
game.play_single_turn()
self.assertEqual(0, game.current_player.mana)
# Make sure the attack got increased
self.assertListEqual([mock.call(2), mock.call(2)], minion_increase_mock.call_args_list)
self.assertListEqual([mock.call(2)], player_increase_mock.call_args_list)
# And make sure that it went down again
self.assertEqual(1, game.current_player.minions[0].calculate_attack())
self.assertEqual(1, game.current_player.minions[1].calculate_attack())
self.assertEqual(0, game.current_player.hero.calculate_attack())
def test_Bite(self):
testing_env = self
class BiteAgent(EnemySpellTestingAgent):
def do_turn(self, player):
super().do_turn(player)
if player.mana == 0:
testing_env.assertEqual(4, game.current_player.hero.calculate_attack())
testing_env.assertEqual(4, game.current_player.hero.armor)
game = generate_game_for(Bite, StonetuskBoar, BiteAgent, DoNothingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
def test_SoulOfTheForest(self):
game = playback(Replay("tests/replays/card_tests/SoulOfTheForest.hsreplay"))
game.start()
self.assertEqual(2, len(game.other_player.minions))
self.assertEqual(2, game.other_player.minions[0].calculate_attack())
self.assertEqual(2, game.other_player.minions[0].health)
self.assertEqual("Treant", game.other_player.minions[0].card.name)
def test_Swipe(self):
deck1 = StackedDeck([BloodfenRaptor(), StonetuskBoar(), StonetuskBoar()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([Swipe()], CHARACTER_CLASS.DRUID, )
game = Game([deck1, deck2], [OneCardPlayingAgent(), EnemyMinionSpellTestingAgent()])
game.pre_game()
game.current_player = game.players[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
# The bloodfen raptor should be left, with one hp
self.assertEqual(1, len(game.other_player.minions))
self.assertEqual(1, game.other_player.minions[0].health)
self.assertEqual(29, game.other_player.hero.health)
def test_KeeperOfTheGrove(self):
# Test Moonfire option
game = generate_game_for(KeeperOfTheGrove, StonetuskBoar, OneCardPlayingAgent, OneCardPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(2, len(game.other_player.minions))
# Test Dispel option
random.seed(1857)
game = generate_game_for(KeeperOfTheGrove, StonetuskBoar, OneCardPlayingAgent, OneCardPlayingAgent)
game.players[0].agent.choose_option = lambda options, player: options[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertTrue(game.current_player.minions[0].charge())
game.play_single_turn()
self.assertFalse(game.other_player.minions[0].charge())
# Test when there are no targets for the spell
random.seed(1857)
game = generate_game_for(KeeperOfTheGrove, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual("Keeper of the Grove", game.current_player.minions[0].card.name)
def test_Moonfire(self):
game = generate_game_for(Moonfire, StonetuskBoar, EnemySpellTestingAgent, OneCardPlayingAgent)
game.play_single_turn()
self.assertEqual(26, game.other_player.hero.health)
def test_DruidOfTheClaw(self):
game = generate_game_for(DruidOfTheClaw, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 9):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(4, game.current_player.minions[0].calculate_attack())
self.assertEqual(4, game.current_player.minions[0].calculate_max_health())
self.assertTrue(game.current_player.minions[0].charge())
self.assertFalse(game.current_player.minions[0].taunt)
test_bear = game.current_player.minions[0].card.create_minion(None)
test_bear.player = game.current_player
self.assertEqual(4, test_bear.calculate_attack())
self.assertEqual(4, test_bear.calculate_max_health())
game.current_player.agent.choose_option = lambda options, player: options[1]
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, len(game.current_player.minions))
self.assertEqual(4, game.current_player.minions[0].calculate_attack())
self.assertEqual(6, game.current_player.minions[0].calculate_max_health())
self.assertFalse(game.current_player.minions[0].charge())
self.assertTrue(game.current_player.minions[0].taunt)
test_bear = game.current_player.minions[0].card.create_minion(None)
test_bear.player = game.current_player
self.assertEqual(4, test_bear.calculate_attack())
self.assertEqual(6, test_bear.calculate_max_health())
def test_Nourish(self):
# Test gaining two mana
game = generate_game_for(Nourish, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(7, game.current_player.max_mana)
self.assertEqual(7, len(game.current_player.hand))
# Ensure that the case where we would be over 10 mana is handled correctly
game.play_single_turn()
game.play_single_turn()
        # Nourish is played. It brings the player's max mana to 10
self.assertEqual(10, game.current_player.max_mana)
self.assertEqual(5, game.current_player.mana)
game.play_single_turn()
game.play_single_turn()
# Nourish is played. It doesn't affect the max_mana, but it does fill in two crystals.
# Tested on patch 2.1.0.7785
self.assertEqual(10, game.current_player.max_mana)
self.assertEqual(7, game.current_player.mana)
# Test drawing three cards
random.seed(1857)
game = generate_game_for(Nourish, StonetuskBoar, CardTestingAgent, DoNothingAgent)
game.players[0].agent.choose_option = lambda options, player: options[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(10, len(game.current_player.hand))
self.assertEqual(5, game.current_player.max_mana)
def test_Starfall(self):
# Test damage to all
game = generate_game_for(Starfall, StonetuskBoar, CardTestingAgent, OneCardPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(4, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
self.assertEqual(30, game.other_player.hero.health)
# Test damage to one
random.seed(1857)
game = generate_game_for(Starfall, MogushanWarden, CardTestingAgent, OneCardPlayingAgent)
game.players[0].agent.choose_option = lambda options, player: options[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.other_player.minions))
self.assertEqual(2, game.other_player.minions[0].health)
self.assertEqual(30, game.other_player.hero.health)
def test_ForceOfNature(self):
game = generate_game_for(ForceOfNature, StonetuskBoar, CardTestingAgent, DoNothingAgent)
for turn in range(0, 10):
game.play_single_turn()
def check_minions():
self.assertEqual(3, len(game.current_player.minions))
for minion in game.current_player.minions:
self.assertEqual(2, minion.calculate_attack())
self.assertEqual(2, minion.health)
self.assertEqual(2, minion.calculate_max_health())
self.assertTrue(minion.charge())
self.assertEqual("Treant", minion.card.name)
game.other_player.bind_once("turn_ended", check_minions)
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
def test_Starfire(self):
game = generate_game_for(Starfire, MogushanWarden, EnemyMinionSpellTestingAgent, OneCardPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(2, len(game.other_player.minions))
self.assertEqual(2, game.other_player.minions[0].health)
self.assertEqual(7, game.other_player.minions[1].health)
self.assertEqual(9, len(game.current_player.hand))
def test_AncientOfLore(self):
game = generate_game_for(AncientOfLore, Starfire, OneCardPlayingAgent, EnemySpellTestingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
        self.assertEqual(25, game.other_player.hero.health)
game.play_single_turn()
self.assertEqual(30, game.current_player.hero.health)
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[0].health)
self.assertEqual(5, game.current_player.minions[0].calculate_attack())
self.assertEqual("Ancient of Lore", game.current_player.minions[0].card.name)
random.seed(1857)
game = generate_game_for(AncientOfLore, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
game.players[0].agent.choose_option = lambda options, player: options[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(10, len(game.current_player.hand))
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[0].health)
self.assertEqual(5, game.current_player.minions[0].calculate_attack())
self.assertEqual("Ancient of Lore", game.current_player.minions[0].card.name)
def test_AncientOfWar(self):
game = generate_game_for(AncientOfWar, IronbeakOwl, OneCardPlayingAgent, OneCardPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[0].calculate_attack())
self.assertEqual(10, game.current_player.minions[0].health)
self.assertEqual(10, game.current_player.minions[0].calculate_max_health())
self.assertTrue(game.current_player.minions[0].taunt)
self.assertEqual("Ancient of War", game.current_player.minions[0].card.name)
self.assertEqual(5, len(game.other_player.minions))
game.play_single_turn()
self.assertEqual(6, len(game.current_player.minions))
self.assertEqual(5, game.other_player.minions[0].health)
self.assertEqual(5, game.other_player.minions[0].calculate_max_health())
self.assertFalse(game.other_player.minions[0].taunt)
random.seed(1857)
game = generate_game_for(AncientOfWar, IronbeakOwl, OneCardPlayingAgent, OneCardPlayingAgent)
game.players[0].agent.choose_option = lambda options, player: options[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(10, game.current_player.minions[0].calculate_attack())
self.assertEqual(5, game.current_player.minions[0].health)
self.assertEqual(5, game.current_player.minions[0].calculate_max_health())
self.assertFalse(game.current_player.minions[0].taunt)
self.assertEqual("Ancient of War", game.current_player.minions[0].card.name)
self.assertEqual(5, len(game.other_player.minions))
game.play_single_turn()
self.assertEqual(6, len(game.current_player.minions))
self.assertEqual(5, game.other_player.minions[0].health)
self.assertEqual(5, game.other_player.minions[0].calculate_max_health())
self.assertEqual(5, game.other_player.minions[0].calculate_attack())
self.assertFalse(game.other_player.minions[0].taunt)
def test_IronbarkProtector(self):
game = generate_game_for(IronbarkProtector, IronbeakOwl, OneCardPlayingAgent, OneCardPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(8, game.current_player.minions[0].calculate_attack())
self.assertEqual(8, game.current_player.minions[0].health)
self.assertEqual(8, game.current_player.minions[0].calculate_max_health())
self.assertTrue(game.current_player.minions[0].taunt)
self.assertEqual("Ironbark Protector", game.current_player.minions[0].card.name)
self.assertEqual(6, len(game.other_player.minions))
game.play_single_turn()
self.assertEqual(7, len(game.current_player.minions))
self.assertFalse(game.other_player.minions[0].taunt)
def test_Cenarius(self):
deck1 = StackedDeck([StonetuskBoar()], CHARACTER_CLASS.DRUID)
deck2 = StackedDeck([WarGolem(), WarGolem(), Cenarius(), Cenarius()], CHARACTER_CLASS.DRUID)
game = Game([deck1, deck2], [DoNothingAgent(), OneCardPlayingAgent()])
game.pre_game()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, len(game.other_player.minions))
for minion in game.other_player.minions:
self.assertEqual(7, minion.calculate_attack())
self.assertEqual(7, minion.health)
self.assertEqual(7, minion.calculate_max_health())
game.play_single_turn()
self.assertEqual(3, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[0].calculate_attack())
self.assertEqual(8, game.current_player.minions[0].health)
self.assertEqual(8, game.current_player.minions[0].calculate_max_health())
self.assertEqual("Cenarius", game.current_player.minions[0].card.name)
for minion_index in range(1, 3):
minion = game.current_player.minions[minion_index]
self.assertEqual(9, minion.calculate_attack())
self.assertEqual(9, minion.health)
self.assertEqual(9, minion.calculate_max_health())
game.players[1].agent.choose_option = lambda options, player: options[1]
game.play_single_turn()
game.play_single_turn()
self.assertEqual(6, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[1].calculate_attack())
self.assertEqual(8, game.current_player.minions[1].health)
self.assertEqual(8, game.current_player.minions[1].calculate_max_health())
self.assertEqual("Cenarius", game.current_player.minions[1].card.name)
self.assertEqual(2, game.current_player.minions[0].calculate_attack())
self.assertEqual(2, game.current_player.minions[0].health)
self.assertEqual(2, game.current_player.minions[0].calculate_max_health())
self.assertTrue(game.current_player.minions[0].taunt)
self.assertEqual("Treant", game.current_player.minions[0].card.name)
self.assertEqual(2, game.current_player.minions[2].calculate_attack())
self.assertEqual(2, game.current_player.minions[2].health)
self.assertEqual(2, game.current_player.minions[2].calculate_max_health())
self.assertTrue(game.current_player.minions[2].taunt)
self.assertEqual("Treant", game.current_player.minions[2].card.name)
def test_PoisonSeeds(self):
game = generate_game_for([StonetuskBoar, BloodfenRaptor, IronfurGrizzly, PoisonSeeds],
HauntedCreeper, OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(0, 6):
game.play_single_turn()
self.assertEqual(2, len(game.current_player.minions))
self.assertEqual(3, len(game.other_player.minions))
game.play_single_turn()
self.assertEqual(3, len(game.current_player.minions))
self.assertEqual(6, len(game.other_player.minions))
for minion in game.current_player.minions:
self.assertEqual("Treant", minion.card.name)
self.assertEqual(2, minion.calculate_attack())
self.assertEqual(2, minion.calculate_max_health())
for index in range(0, 4):
self.assertEqual("Spectral Spider", game.other_player.minions[index].card.name)
self.assertEqual(1, game.other_player.minions[index].calculate_attack())
self.assertEqual(1, game.other_player.minions[index].calculate_max_health())
self.assertEqual("Treant", game.other_player.minions[4].card.name)
self.assertEqual(2, game.other_player.minions[4].calculate_attack())
self.assertEqual(2, game.other_player.minions[4].calculate_max_health())
self.assertEqual("Treant", game.other_player.minions[5].card.name)
self.assertEqual(2, game.other_player.minions[5].calculate_attack())
self.assertEqual(2, game.other_player.minions[5].calculate_max_health())
def test_AnodizedRoboCub(self):
game = generate_game_for(AnodizedRoboCub, IronbeakOwl, OneCardPlayingAgent, OneCardPlayingAgent)
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(3, game.current_player.minions[0].calculate_attack())
self.assertEqual(2, game.current_player.minions[0].health)
self.assertEqual(2, game.current_player.minions[0].calculate_max_health())
self.assertTrue(game.current_player.minions[0].taunt)
self.assertEqual("Anodized Robo Cub", game.current_player.minions[0].card.name)
self.assertEqual(0, len(game.other_player.minions))
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(2, game.other_player.minions[0].health)
self.assertEqual(2, game.other_player.minions[0].calculate_max_health())
self.assertFalse(game.other_player.minions[0].taunt)
random.seed(1857)
game = generate_game_for(AnodizedRoboCub, IronbeakOwl, OneCardPlayingAgent, OneCardPlayingAgent)
game.players[0].agent.choose_option = lambda options, player: options[1]
game.play_single_turn()
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(2, game.current_player.minions[0].calculate_attack())
self.assertEqual(3, game.current_player.minions[0].health)
self.assertEqual(3, game.current_player.minions[0].calculate_max_health())
self.assertTrue(game.current_player.minions[0].taunt)
self.assertEqual("Anodized Robo Cub", game.current_player.minions[0].card.name)
self.assertEqual(0, len(game.other_player.minions))
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(2, game.other_player.minions[0].health)
self.assertEqual(2, game.other_player.minions[0].calculate_max_health())
self.assertEqual(2, game.other_player.minions[0].calculate_attack())
self.assertFalse(game.other_player.minions[0].taunt)
def test_MechBearCat(self):
game = generate_game_for(MechBearCat, Whirlwind, OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(0, 11):
game.play_single_turn()
self.assertEqual(8, len(game.players[0].hand))
self.assertEqual(6, game.players[0].minions[0].health)
# Whirlwind damages Mech-Bear-Cat drawing a Spare Part
game.play_single_turn()
self.assertEqual(9, len(game.players[0].hand))
self.assertEqual(5, game.players[0].minions[0].health)
self.assertEqual("Rusty Horn", game.players[0].hand[8].name)
def test_DarkWispers(self):
game = generate_game_for(DarkWispers, SaltyDog, CardTestingAgent, OneCardPlayingAgent)
game.players[0].agent.choose_option = lambda options, player: options[1]
for turn in range(0, 10):
game.play_single_turn()
# 1 Salty Dog on the field
self.assertEqual(0, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
self.assertEqual(7, game.players[1].minions[0].calculate_attack())
self.assertEqual(4, game.players[1].minions[0].health)
self.assertFalse(game.players[1].minions[0].taunt)
# Chooses to buff enemy Salty Dog
game.play_single_turn()
self.assertEqual(0, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
self.assertEqual(12, game.players[1].minions[0].calculate_attack())
self.assertEqual(9, game.players[1].minions[0].health)
self.assertTrue(game.players[1].minions[0].taunt)
random.seed(1857)
game = generate_game_for(DarkWispers, SaltyDog, CardTestingAgent, OneCardPlayingAgent)
for turn in range(0, 10):
game.play_single_turn()
# 1 Salty Dog on the field
self.assertEqual(0, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
# Summons 5 Wisps
game.play_single_turn()
self.assertEqual(5, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
self.assertEqual(1, game.players[0].minions[0].calculate_attack())
self.assertEqual(1, game.players[0].minions[0].health)
self.assertEqual("Wisp", game.players[0].minions[0].card.name)
self.assertEqual(1, game.players[0].minions[1].calculate_attack())
self.assertEqual(1, game.players[0].minions[1].health)
self.assertEqual("Wisp", game.players[0].minions[1].card.name)
self.assertEqual(1, game.players[0].minions[2].calculate_attack())
self.assertEqual(1, game.players[0].minions[2].health)
self.assertEqual("Wisp", game.players[0].minions[2].card.name)
self.assertEqual(1, game.players[0].minions[3].calculate_attack())
self.assertEqual(1, game.players[0].minions[3].health)
self.assertEqual("Wisp", game.players[0].minions[3].card.name)
self.assertEqual(1, game.players[0].minions[4].calculate_attack())
self.assertEqual(1, game.players[0].minions[4].health)
self.assertEqual("Wisp", game.players[0].minions[4].card.name)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(7, len(game.players[0].minions))
def test_DruidOfTheFang(self):
game = generate_game_for([StonetuskBoar, DruidOfTheFang], DruidOfTheFang,
OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(10):
game.play_single_turn()
self.assertEqual(2, len(game.other_player.minions))
self.assertEqual(7, game.other_player.minions[0].calculate_attack())
self.assertEqual(7, game.other_player.minions[0].calculate_max_health())
self.assertEqual(MINION_TYPE.BEAST, game.other_player.minions[0].card.minion_type)
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(4, game.current_player.minions[0].calculate_attack())
self.assertEqual(4, game.current_player.minions[0].calculate_max_health())
game.other_player.minions[0].silence()
self.assertEqual(7, game.other_player.minions[0].calculate_attack())
self.assertEqual(7, game.other_player.minions[0].calculate_max_health())
self.assertEqual(MINION_TYPE.BEAST, game.other_player.minions[0].card.minion_type)
def test_Recycle(self):
game = generate_game_for(Recycle, StonetuskBoar, OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(10):
game.play_single_turn()
self.assertEqual(5, len(game.current_player.minions))
self.assertEqual(21, game.current_player.deck.left)
game.play_single_turn()
self.assertEqual(4, len(game.other_player.minions))
self.assertEqual(22, game.other_player.deck.left)
def test_Malorne(self):
game = generate_game_for(Malorne, Assassinate, OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(13):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(20, game.players[0].deck.left)
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
self.assertEqual(21, game.other_player.deck.left)
for card in game.other_player.deck.cards:
self.assertIsNotNone(card)
def test_Malorne_game_ends(self):
game = generate_game_for(Malorne, Malorne, PlayAndAttackAgent, PlayAndAttackAgent)
for turn in range(500):
game.play_single_turn()
self.assertTrue(game.game_ended)
def test_GroveTender(self):
game = generate_game_for(GroveTender, Wisp, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 4):
game.play_single_turn()
# Before Gift of Mana
self.assertEqual(0, len(game.players[0].minions))
self.assertEqual(2, game.players[0].max_mana)
self.assertEqual(2, game.players[1].max_mana)
game.play_single_turn()
# Both players have 1 more full mana crystal
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(1, game.players[0].mana)
self.assertEqual(4, game.players[0].max_mana)
self.assertEqual(3, game.players[1].mana)
self.assertEqual(3, game.players[1].max_mana)
game.players[0].agent.choose_option = lambda options, player: options[1]
# Before Gift of Cards
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(5, len(game.players[0].hand))
self.assertEqual(8, len(game.players[1].hand))
# Both players draw 1
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(6, len(game.players[0].hand))
self.assertEqual(9, len(game.players[1].hand))
def test_TreeOfLife(self):
game = generate_game_for([SpiderTank, Hellfire, TreeOfLife], [SpiderTank, Deathwing],
OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(0, 16):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
self.assertEqual(1, game.players[0].minions[0].health)
self.assertEqual(1, game.players[1].minions[0].health)
self.assertEqual(27, game.current_player.hero.health)
self.assertEqual(27, game.other_player.hero.health)
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
self.assertEqual(4, game.players[0].minions[0].health)
self.assertEqual(4, game.players[1].minions[0].health)
self.assertEqual(30, game.current_player.hero.health)
self.assertEqual(30, game.other_player.hero.health)
def test_TreeOfLifeAuchenai(self):
game = generate_game_for([ShieldBlock, AuchenaiSoulpriest, TreeOfLife], [ShieldBlock, Deathwing],
OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(0, 16):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(5, game.players[0].minions[0].health)
self.assertEqual(30, game.current_player.hero.health)
self.assertEqual(30, game.other_player.hero.health)
self.assertEqual(5, game.current_player.hero.armor)
self.assertEqual(5, game.other_player.hero.armor)
game.play_single_turn()
self.assertEqual(0, len(game.players[0].minions))
self.assertEqual(5, game.current_player.hero.health)
self.assertEqual(5, game.other_player.hero.health)
self.assertEqual(0, game.current_player.hero.armor)
self.assertEqual(0, game.other_player.hero.armor)
def test_DruidOfTheFlame(self):
game = generate_game_for(DruidOfTheFlame, StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[0].calculate_attack())
self.assertEqual(2, game.current_player.minions[0].calculate_max_health())
test_cat = game.current_player.minions[0].card.create_minion(None)
test_cat.player = game.current_player
self.assertEqual(5, test_cat.calculate_attack())
self.assertEqual(2, test_cat.calculate_max_health())
game.current_player.agent.choose_option = lambda options, player: options[1]
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, len(game.current_player.minions))
self.assertEqual(2, game.current_player.minions[0].calculate_attack())
self.assertEqual(5, game.current_player.minions[0].calculate_max_health())
test_bird = game.current_player.minions[0].card.create_minion(None)
test_bird.player = game.current_player
self.assertEqual(2, test_bird.calculate_attack())
self.assertEqual(5, test_bird.calculate_max_health())
def test_Malorne_UnstablePortal(self):
class MalornePortal(SpellCard):
def __init__(self):
super().__init__("Malorne Portal", 2, CHARACTER_CLASS.MAGE, CARD_RARITY.RARE)
def use(self, player, game):
super().use(player, game)
query = CollectionSource([HasCardName("Malorne")])
                new_minion = query.get_card(player, player, self)
                player.hand.append(new_minion)
                new_minion.attach(new_minion, player)
game = generate_game_for(MalornePortal, Naturalize, OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(3):
game.play_single_turn()
# Dump all the cards but the portal'd minion
game.current_player.hand = game.current_player.hand[-1:]
for turn in range(10):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
def test_Malorne_Deathlord(self):
game = generate_game_for([Deathlord, Naturalize, Naturalize], Malorne, CardTestingAgent, DoNothingAgent)
for turn in range(5):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual("Deathlord", game.current_player.minions[0].card.name)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(0, len(game.current_player.minions))
def test_VolcanicLumberer(self):
game = generate_game_for(LeeroyJenkins, [TwistingNether, VolcanicLumberer],
OneCardPlayingAgent, CardTestingAgent)
for turn in range(0, 15):
game.play_single_turn()
self.assertEqual(4, len(game.players[0].minions))
self.assertEqual(7, len(game.players[1].minions))
game.play_single_turn()
self.assertEqual(0, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
self.assertEqual(7, game.players[1].minions[0].calculate_attack())
self.assertEqual(8, game.players[1].minions[0].calculate_max_health())
| {
"content_hash": "8d737a0167a7d594b4f21146dec5a05d",
"timestamp": "",
"source": "github",
"line_count": 1125,
"max_line_length": 113,
"avg_line_length": 41.58311111111111,
"alnum_prop": 0.6561210747953229,
"repo_name": "jirenz/CS229_Project",
"id": "cae0f28523fc9502d660a3936c77d27261d68535",
"size": "46781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/card_tests/druid_tests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "1391785"
}
],
"symlink_target": ""
} |
{
'!langcode!': 'pt',
'!langname!': 'Português',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" é uma expressão opcional como "campo1=\'novo_valor\'". Não é permitido atualizar ou apagar resultados de um JOIN',
'%s %%{row} deleted': '%s registros apagados',
'%s %%{row} updated': '%s registros atualizados',
'%Y-%m-%d': '%d/%m/%Y',
'%Y-%m-%d %H:%M:%S': '%d/%m/%Y %H:%M:%S',
'(requires internet access)': '(requer acesso à internet)',
'(requires internet access, experimental)': '(requer acesso à internet, experimental)',
'(something like "it-it")': '(algo como "it-it")',
'@markmin\x01(file **gluon/contrib/plural_rules/%s.py** is not found)': '(file **gluon/contrib/plural_rules/%s.py** is not found)',
'@markmin\x01An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
'@markmin\x01Searching: **%s** %%{file}': 'Searching: **%s** files',
'A new version of web2py is available': 'Está disponível uma nova versão do web2py',
'A new version of web2py is available: %s': 'Está disponível uma nova versão do web2py: %s',
'About': 'sobre',
'About application': 'Sobre a aplicação',
'additional code for your application': 'código adicional para sua aplicação',
'Additional code for your application': 'Código adicional para a sua aplicação',
'admin disabled because no admin password': 'admin desabilitado por falta de senha definida',
'admin disabled because not supported on google app engine': 'admin desabilitado, não é suportado no GAE',
'admin disabled because unable to access password file': 'admin desabilitado, não foi possível ler o arquivo de senha',
'Admin is disabled because insecure channel': 'Admin desabilitado pois o canal não é seguro',
'Admin is disabled because unsecure channel': 'Admin desabilitado pois o canal não é seguro',
'Admin language': 'Linguagem do Admin',
'administrative interface': 'interface administrativa',
'Administrator Password:': 'Senha de administrador:',
'and rename it (required):': 'e renomeie (requerido):',
'and rename it:': ' e renomeie:',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'admin desabilitado, canal inseguro',
'application "%s" uninstalled': 'aplicação "%s" desinstalada',
'application compiled': 'aplicação compilada',
'application is compiled and cannot be designed': 'A aplicação está compilada e não pode ser modificada',
'Application name:': 'Nome da aplicação:',
'are not used': 'não usadas',
'are not used yet': 'ainda não usadas',
'Are you sure you want to delete file "%s"?': 'Tem certeza que deseja apagar o arquivo "%s"?',
'Are you sure you want to delete plugin "%s"?': 'Tem certeza que deseja apagar o plugin "%s"?',
'Are you sure you want to delete this object?': 'Are you sure you want to delete this object?',
'Are you sure you want to uninstall application "%s"': 'Tem certeza que deseja apagar a aplicação "%s"?',
'Are you sure you want to uninstall application "%s"?': 'Tem certeza que deseja apagar a aplicação "%s"?',
'Are you sure you want to upgrade web2py now?': 'Tem certeza que deseja atualizar o web2py agora?',
'arguments': 'argumentos',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENÇÃO: o login requer uma conexão segura (HTTPS) ou execução a partir de localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENÇÃO: OS TESTES NÃO SÃO THREAD SAFE, NÃO EFETUE MÚLTIPLOS TESTES AO MESMO TEMPO.',
'ATTENTION: you cannot edit the running application!': 'ATENÇÃO: Não pode modificar a aplicação em execução!',
'Autocomplete Python Code': 'Autocompletar Código Python',
'Available databases and tables': 'Bancos de dados e tabelas disponíveis',
'back': 'voltar',
'browse': 'buscar',
'cache': 'cache',
'cache, errors and sessions cleaned': 'cache, erros e sessões eliminadas',
'can be a git repo': 'can be a git repo',
'Cannot be empty': 'Não pode ser vazio',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'Não é possível compilar: Existem erros em sua aplicação. Depure, corrija os errros e tente novamente',
'Cannot compile: there are errors in your app:': 'Não é possível compilar: Existem erros em sua aplicação',
'cannot create file': 'Não é possível criar o arquivo',
'cannot upload file "%(filename)s"': 'não é possível fazer upload do arquivo "%(filename)s"',
'Change admin password': 'mudar senha de administrador',
'change editor settings': 'mudar definições do editor',
'Change Password': 'Trocar Senha',
'check all': 'marcar todos',
'Check for upgrades': 'checar por atualizações',
'Check to delete': 'Marque para apagar',
'Checking for upgrades...': 'Buscando atualizações...',
'Clean': 'limpar',
'click here for online examples': 'clique para ver exemplos online',
'click here for the administrative interface': 'Clique aqui para acessar a interface administrativa',
'Click row to expand traceback': 'Clique em uma coluna para expandir o log do erro',
'click to check for upgrades': 'clique aqui para checar por atualizações',
'click to open': 'clique para abrir',
'Client IP': 'IP do cliente',
'code': 'código',
'collapse/expand all': 'colapsar/expandir tudo',
'commit (mercurial)': 'commit (mercurial)',
'Compile': 'compilar',
'compiled application removed': 'aplicação compilada removida',
'Controllers': 'Controladores',
'controllers': 'controladores',
'Count': 'Contagem',
'Create': 'criar',
'create file with filename:': 'criar um arquivo com o nome:',
'Create new application using the Wizard': 'Criar nova aplicação utilizando o assistente',
'create new application:': 'nome da nova aplicação:',
'Create new simple application': 'Crie uma nova aplicação',
'Create/Upload': 'Create/Upload',
'created by': 'criado por',
'crontab': 'crontab',
'Current request': 'Requisição atual',
'Current response': 'Resposta atual',
'Current session': 'Sessão atual',
'currently running': 'Executando',
'currently saved or': 'Atualmente salvo ou',
'customize me!': 'Modifique-me',
'data uploaded': 'Dados enviados',
'database': 'banco de dados',
'database %s select': 'Seleção no banco de dados %s',
'database administration': 'administração de banco de dados',
'Date and Time': 'Data e Hora',
'db': 'db',
'Debug': 'Debug',
'defines tables': 'define as tabelas',
'Delete': 'Apague',
'delete': 'apagar',
'delete all checked': 'apagar marcados',
'delete plugin': 'apagar plugin',
'Delete this file (you will be asked to confirm deletion)': 'Delete this file (you will be asked to confirm deletion)',
'Delete:': 'Apague:',
'Deploy': 'publicar',
'Deploy on Google App Engine': 'Publicar no Google App Engine',
'Deploy to OpenShift': 'Deploy to OpenShift',
'Description': 'Descrição',
'design': 'modificar',
'DESIGN': 'Projeto',
'Design for': 'Projeto de',
'Detailed traceback description': 'Detailed traceback description',
'direction: ltr': 'direção: ltr',
'Disable': 'Disable',
'docs': 'docs',
'done!': 'feito!',
'download layouts': 'download layouts',
'Download layouts from repository': 'Download layouts from repository',
'download plugins': 'download plugins',
'Download plugins from repository': 'Download plugins from repository',
'E-mail': 'E-mail',
'EDIT': 'EDITAR',
'Edit': 'editar',
'Edit application': 'Editar aplicação',
'edit controller': 'editar controlador',
'Edit current record': 'Editar o registro atual',
'Edit Profile': 'Editar Perfil',
'edit views:': 'editar visões:',
'Editing %s': 'A Editar %s',
'Editing file': 'Editando arquivo',
'Editing file "%s"': 'Editando arquivo "%s"',
'Editing Language file': 'Editando arquivo de linguagem',
'Enterprise Web Framework': 'Framework web empresarial',
'Error': 'Erro',
'Error logs for "%(app)s"': 'Logs de erro para "%(app)s"',
'Error snapshot': 'Error snapshot',
'Error ticket': 'Error ticket',
'Errors': 'erros',
'Exception instance attributes': 'Atributos da instancia de excessão',
'Exit Fullscreen': 'Sair de Ecrã Inteiro',
'Expand Abbreviation (html files only)': 'Expandir Abreviação (só para ficheiros html)',
'export as csv file': 'exportar como arquivo CSV',
'exposes': 'expõe',
'extends': 'estende',
'failed to reload module': 'Falha ao recarregar o módulo',
'failed to reload module because:': 'falha ao recarregar o módulo por:',
'File': 'Arquivo',
'file "%(filename)s" created': 'arquivo "%(filename)s" criado',
'file "%(filename)s" deleted': 'arquivo "%(filename)s" apagado',
'file "%(filename)s" uploaded': 'arquivo "%(filename)s" enviado',
'file "%(filename)s" was not deleted': 'arquivo "%(filename)s" não foi apagado',
'file "%s" of %s restored': 'arquivo "%s" de %s restaurado',
'file changed on disk': 'arquivo modificado no disco',
'file does not exist': 'arquivo não existe',
'file saved on %(time)s': 'arquivo salvo em %(time)s',
'file saved on %s': 'arquivo salvo em %s',
'filter': 'filtro',
'Find Next': 'Localizar Seguinte',
'Find Previous': 'Localizar Anterior',
'First name': 'Nome',
'Frames': 'Frames',
'Functions with no doctests will result in [passed] tests.': 'Funções sem doctests resultarão em testes [aceitos].',
'graph model': 'graph model',
'Group ID': 'ID do Grupo',
'Hello World': 'Olá Mundo',
'Help': 'ajuda',
'Hide/Show Translated strings': 'Ocultar/Mostrar textos traduzidos',
'htmledit': 'htmledit',
'If the report above contains a ticket number it indicates a failure in executing the controller, before any attempt to execute the doctests. This is usually due to an indentation error or an error outside function code.\nA green title indicates that all tests (if defined) passed. In this case test results are not shown.': 'Se o relatório acima contém um número de ticket, isso indica uma falha no controlador em execução, antes de tentar executar os doctests. Isto acontece geralmente por erro de indentação ou erro fora do código da função.\r\nO título em verde indica que todos os testes (se definidos) passaram. Neste caso os resultados dos testes não são mostrados.',
'Import/Export': 'Importar/Exportar',
'includes': 'inclui',
'insert new': 'inserir novo',
'insert new %s': 'inserir novo %s',
'inspect attributes': 'inspecionar atributos',
'Install': 'instalar',
'Installed applications': 'Aplicações instaladas',
'internal error': 'erro interno',
'Internal State': 'Estado Interno',
'Invalid action': 'Ação inválida',
'Invalid email': 'E-mail inválido',
'invalid password': 'senha inválida',
'Invalid Query': 'Consulta inválida',
'invalid request': 'solicitação inválida',
'invalid ticket': 'ticket inválido',
'Keyboard shortcuts': 'Atalhos de teclado',
'language file "%(filename)s" created/updated': 'arquivo de linguagem "%(filename)s" criado/atualizado',
'Language files (static strings) updated': 'Arquivos de linguagem (textos estáticos) atualizados',
'languages': 'linguagens',
'Languages': 'Linguagens',
'languages updated': 'linguagens atualizadas',
'Last name': 'Sobrenome',
'Last saved on:': 'Salvo em:',
'License for': 'Licença para',
'loading...': 'carregando...',
'locals': 'locals',
'Login': 'Entrar',
'login': 'início de sessão',
'Login to the Administrative Interface': 'Entrar na interface administrativa',
'Logout': 'finalizar sessão',
'Lost Password': 'Senha perdida',
'Manage': 'Manage',
'manage': 'gerenciar',
'merge': 'juntar',
'Models': 'Modelos',
'models': 'modelos',
'Modules': 'Módulos',
'modules': 'módulos',
'Name': 'Nome',
'new application "%s" created': 'nova aplicação "%s" criada',
'New application wizard': 'Assistente para novas aplicações',
'new plugin installed': 'novo plugin instalado',
'New Record': 'Novo registro',
'new record inserted': 'novo registro inserido',
'New simple application': 'Nova aplicação básica',
'next 100 rows': 'próximos 100 registros',
'NO': 'NÃO',
'No databases in this application': 'Não existem bancos de dados nesta aplicação',
'no match': 'não encontrado',
'no package selected': 'nenhum pacote selecionado',
'online designer': 'online designer',
'or alternatively': 'or alternatively',
'Or Get from URL:': 'Ou Obtenha do URL:',
'or import from csv file': 'ou importar de um arquivo CSV',
'or provide app url:': 'ou forneça a url de uma aplicação:',
'or provide application url:': 'ou forneça a url de uma aplicação:',
'Origin': 'Origem',
'Original/Translation': 'Original/Tradução',
'Overwrite installed app': 'sobrescrever aplicação instalada',
'Pack all': 'criar pacote',
'Pack compiled': 'criar pacote compilado',
'Pack custom': 'Pack custom',
'pack plugin': 'empacotar plugin',
'PAM authenticated user, cannot change password here': 'usuário autenticado por PAM, não pode alterar a senha por aqui',
'Password': 'Senha',
'password changed': 'senha alterada',
'Peeking at file': 'Visualizando arquivo',
'plugin "%(plugin)s" deleted': 'plugin "%(plugin)s" eliminado',
'Plugin "%s" in application': 'Plugin "%s" na aplicação',
'plugins': 'plugins',
'Plugins': 'Plugins',
'Plural-Forms:': 'Plural-Forms:',
'Powered by': 'Este site utiliza',
'previous 100 rows': '100 registros anteriores',
'Private files': 'Private files',
'private files': 'private files',
'Query:': 'Consulta:',
'Rapid Search': 'Rapid Search',
'record': 'registro',
'record does not exist': 'o registro não existe',
'record id': 'id do registro',
'Record ID': 'ID do Registro',
'Register': 'Registrar-se',
'Registration key': 'Chave de registro',
'Reload routes': 'Reload routes',
'Remove compiled': 'eliminar compilados',
'Replace': 'Substituir',
'Replace All': 'Substituir Tudo',
'request': 'request',
'Resolve Conflict file': 'Arquivo de resolução de conflito',
'response': 'response',
'restore': 'restaurar',
'revert': 'reverter',
'Role': 'Papel',
'Rows in table': 'Registros na tabela',
'Rows selected': 'Registros selecionados',
'rules are not defined': 'rules are not defined',
"Run tests in this file (to run all files, you may also use the button labelled 'test')": "Run tests in this file (to run all files, you may also use the button labelled 'test')",
'Running on %s': 'A correr em %s',
'Save': 'Save',
'save': 'salvar',
'Save file:': 'Gravar ficheiro:',
'Save file: %s': 'Gravar ficheiro: %s',
'Save via Ajax': 'Gravar via Ajax',
'Saved file hash:': 'Hash do arquivo salvo:',
'selected': 'selecionado(s)',
'session': 'session',
'session expired': 'sessão expirada',
'shell': 'Terminal',
'Site': 'site',
'some files could not be removed': 'alguns arquivos não puderam ser removidos',
'Start searching': 'Start searching',
'Start wizard': 'iniciar assistente',
'state': 'estado',
'Static': 'Static',
'static': 'estáticos',
'Static files': 'Arquivos estáticos',
'Submit': 'Submit',
'submit': 'enviar',
'Sure you want to delete this object?': 'Tem certeza que deseja apagar este objeto?',
'table': 'tabela',
'Table name': 'Nome da tabela',
'test': 'testar',
'Testing application': 'Testando a aplicação',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'A "consulta" é uma condição como "db.tabela.campo1==\'valor\'". Algo como "db.tabela1.campo1==db.tabela2.campo2" resulta em um JOIN SQL.',
'the application logic, each URL path is mapped in one exposed function in the controller': 'A lógica da aplicação, cada URL é mapeada para uma função exposta pelo controlador',
'The application logic, each URL path is mapped in one exposed function in the controller': 'The application logic, each URL path is mapped in one exposed function in the controller',
'the data representation, define database tables and sets': 'A representação dos dados, define tabelas e estruturas de dados',
'The data representation, define database tables and sets': 'The data representation, define database tables and sets',
'The presentations layer, views are also known as templates': 'The presentations layer, views are also known as templates',
'the presentations layer, views are also known as templates': 'A camada de apresentação, As visões também são chamadas de templates',
'There are no controllers': 'Não existem controllers',
'There are no models': 'Não existem modelos',
'There are no modules': 'Não existem módulos',
'There are no plugins': 'There are no plugins',
'There are no private files': 'Não existem arquivos privados',
'There are no static files': 'Não existem arquivos estáticos',
'There are no translators, only default language is supported': 'Não há traduções, somente a linguagem padrão é suportada',
'There are no views': 'Não existem visões',
'These files are not served, they are only available from within your app': 'These files are not served, they are only available from within your app',
'These files are served without processing, your images go here': 'These files are served without processing, your images go here',
'these files are served without processing, your images go here': 'Estes arquivos são servidos sem processamento, suas imagens ficam aqui',
'This is the %(filename)s template': 'Este é o template %(filename)s',
'Ticket': 'Ticket',
'Ticket ID': 'Ticket ID',
'Timestamp': 'Data Atual',
'TM': 'MR',
'to previous version.': 'para a versão anterior.',
'To create a plugin, name a file/folder plugin_[name]': 'Para criar um plugin, nomeie um arquivo/pasta como plugin_[nome]',
'toggle breakpoint': 'toggle breakpoint',
'Toggle comment': 'Toggle comment',
'Toggle Fullscreen': 'Toggle Fullscreen',
'Traceback': 'Traceback',
'translation strings for the application': 'textos traduzidos para a aplicação',
'Translation strings for the application': 'Translation strings for the application',
'try': 'tente',
'try something like': 'tente algo como',
'Try the mobile interface': 'Try the mobile interface',
'Unable to check for upgrades': 'Não é possível checar as atualizações',
'unable to create application "%s"': 'não é possível criar a aplicação "%s"',
'unable to delete file "%(filename)s"': 'não é possível criar o arquico "%(filename)s"',
'unable to delete file plugin "%(plugin)s"': 'não é possível criar o plugin "%(plugin)s"',
'Unable to download': 'Não é possível efetuar o download',
'Unable to download app': 'Não é possível baixar a aplicação',
'Unable to download app because:': 'Não é possível baixar a aplicação porque:',
'Unable to download because': 'Não é possível baixar porque',
'unable to parse csv file': 'não é possível analisar o arquivo CSV',
'unable to uninstall "%s"': 'não é possível desinstalar "%s"',
'unable to upgrade because "%s"': 'não é possível atualizar porque "%s"',
'uncheck all': 'desmarcar todos',
'Uninstall': 'desinstalar',
'update': 'atualizar',
'update all languages': 'atualizar todas as linguagens',
'Update:': 'Atualizar:',
'upgrade web2py now': 'atualize o web2py agora',
'upload': 'upload',
'Upload': 'Upload',
'Upload & install packed application': 'Faça upload e instale uma aplicação empacotada',
'Upload a package:': 'Faça upload de um pacote:',
'Upload and install packed application': 'Upload and install packed application',
'upload application:': 'Fazer upload de uma aplicação:',
'Upload existing application': 'Faça upload de uma aplicação existente',
'upload file:': 'Enviar arquivo:',
'upload plugin file:': 'Enviar arquivo de plugin:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, y ~(...) para NOT, para criar consultas mais complexas.',
'Use an url:': 'Use uma url:',
'User ID': 'ID do Usuário',
'variables': 'variáveis',
'Version': 'Versão',
'versioning': 'versionamento',
'Versioning': 'Versioning',
'view': 'visão',
'Views': 'Visões',
'views': 'visões',
'Web Framework': 'Web Framework',
'web2py is up to date': 'web2py está atualizado',
'web2py Recent Tweets': 'Tweets Recentes de @web2py',
'web2py upgraded; please restart it': 'web2py atualizado; favor reiniciar',
'Welcome to web2py': 'Bem-vindo ao web2py',
'YES': 'SIM',
}
| {
"content_hash": "4ce0517c5d698db546ddd8d324231d00",
"timestamp": "",
"source": "github",
"line_count": 376,
"max_line_length": 653,
"avg_line_length": 52.316489361702125,
"alnum_prop": 0.7190788470337044,
"repo_name": "laonawuli/addrest",
"id": "3f850c3cb0a154d08206e790f03d2e75fb28d8ea",
"size": "19976",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "web2py/applications/admin/languages/pt.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "48827"
},
{
"name": "HTML",
"bytes": "238373"
},
{
"name": "JavaScript",
"bytes": "2775673"
},
{
"name": "Makefile",
"bytes": "6758"
},
{
"name": "Python",
"bytes": "6580950"
},
{
"name": "Ruby",
"bytes": "850"
},
{
"name": "Shell",
"bytes": "104494"
},
{
"name": "Tcl",
"bytes": "94649"
}
],
"symlink_target": ""
} |
"""
test_get_user_config.
Tests formerly implemented as unittest methods in test_config.py:
TestGetUserConfig.test_get_user_config_valid
TestGetUserConfig.test_get_user_config_invalid
TestGetUserConfig.test_get_user_config_nonexistent
"""
import os
import shutil
import pytest
from cookiecutter import config
from cookiecutter.exceptions import InvalidConfiguration
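# Module-scoped fixture: the expected rc path is computed once and shared by
# every test in this file.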
@pytest.fixture(scope='module')
def user_config_path():
return os.path.expanduser('~/.cookiecutterrc')
@pytest.fixture(scope='function')
def back_up_rc(request, user_config_path):
"""
Back up an existing cookiecutter rc and restore it after the test.
    If ~/.cookiecutterrc is pre-existing, move it to a temp location.
"""
user_config_path_backup = os.path.expanduser('~/.cookiecutterrc.backup')
if os.path.exists(user_config_path):
shutil.copy(user_config_path, user_config_path_backup)
os.remove(user_config_path)
def remove_test_rc():
"""Remove the ~/.cookiecutterrc that has been created in the test."""
if os.path.exists(user_config_path):
os.remove(user_config_path)
def restore_original_rc():
"""If it existed, restore the original ~/.cookiecutterrc."""
if os.path.exists(user_config_path_backup):
shutil.copy(user_config_path_backup, user_config_path)
os.remove(user_config_path_backup)
    # According to the py.test source code, finalizers are popped from an
    # internal list that we populated via 'addfinalizer'. As a result, the
    # last-added finalizer function is executed first.
request.addfinalizer(restore_original_rc)
request.addfinalizer(remove_test_rc)
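# Expected result of parsing tests/test-config/valid-config.yaml; kept in a
# fixture so several tests can compare against the same dict.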
@pytest.fixture
def custom_config():
return {
'default_context': {
'full_name': 'Firstname Lastname',
'email': '[email protected]',
'github_username': 'example',
},
'cookiecutters_dir': '/home/example/some-path-to-templates',
'replay_dir': '/home/example/some-path-to-replay-files',
'abbreviations': {
'gh': 'https://github.com/{0}.git',
'gl': 'https://gitlab.com/{0}.git',
'bb': 'https://bitbucket.org/{0}',
'helloworld': 'https://github.com/hackebrot/helloworld',
}
}
@pytest.mark.usefixtures('back_up_rc')
def test_get_user_config_valid(user_config_path, custom_config):
"""Get config from a valid ~/.cookiecutterrc file."""
shutil.copy('tests/test-config/valid-config.yaml', user_config_path)
conf = config.get_user_config()
assert conf == custom_config
@pytest.mark.usefixtures('back_up_rc')
def test_get_user_config_invalid(user_config_path):
"""Get config from an invalid ~/.cookiecutterrc file."""
shutil.copy('tests/test-config/invalid-config.yaml', user_config_path)
with pytest.raises(InvalidConfiguration):
config.get_user_config()
@pytest.mark.usefixtures('back_up_rc')
def test_get_user_config_nonexistent():
"""Get config from a nonexistent ~/.cookiecutterrc file."""
assert config.get_user_config() == config.DEFAULT_CONFIG
@pytest.fixture
def custom_config_path(custom_config):
return 'tests/test-config/valid-config.yaml'
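# Spying on config.get_config confirms the explicitly supplied path is passed
# through unchanged.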
def test_specify_config_path(mocker, custom_config_path, custom_config):
spy_get_config = mocker.spy(config, 'get_config')
user_config = config.get_user_config(custom_config_path)
spy_get_config.assert_called_once_with(custom_config_path)
assert user_config == custom_config
def test_default_config_path(user_config_path):
assert config.USER_CONFIG_PATH == user_config_path
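# With no explicit path argument, the COOKIECUTTER_CONFIG environment variable
# decides which file is loaded.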
def test_default_config_from_env_variable(
monkeypatch, custom_config_path, custom_config):
monkeypatch.setenv('COOKIECUTTER_CONFIG', custom_config_path)
user_config = config.get_user_config()
assert user_config == custom_config
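# default_config=True must bypass config-file loading entirely, hence the
# assertion that get_config was never called.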
def test_force_default_config(mocker):
spy_get_config = mocker.spy(config, 'get_config')
user_config = config.get_user_config(None, default_config=True)
assert user_config == config.DEFAULT_CONFIG
assert not spy_get_config.called
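# '~' in configured directories should be expanded; os.path.expanduser is
# stubbed so the test does not depend on the real home directory.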
def test_expand_user_for_directories_in_config(monkeypatch):
def _expanduser(path):
return path.replace('~', 'Users/bob')
monkeypatch.setattr('os.path.expanduser', _expanduser)
config_file = 'tests/test-config/config-expand-user.yaml'
user_config = config.get_user_config(config_file)
assert user_config['replay_dir'] == 'Users/bob/replay-files'
assert user_config['cookiecutters_dir'] == 'Users/bob/templates'
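# Environment variables (here $COOKIES) embedded in directory settings should
# be expanded as well.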
def test_expand_vars_for_directories_in_config(monkeypatch):
monkeypatch.setenv('COOKIES', 'Users/bob/cookies')
config_file = 'tests/test-config/config-expand-vars.yaml'
user_config = config.get_user_config(config_file)
assert user_config['replay_dir'] == 'Users/bob/cookies/replay-files'
assert user_config['cookiecutters_dir'] == 'Users/bob/cookies/templates'
| {
"content_hash": "251687f49f9ac20018078e6ee6d24ec2",
"timestamp": "",
"source": "github",
"line_count": 150,
"max_line_length": 77,
"avg_line_length": 33.12,
"alnum_prop": 0.6924315619967794,
"repo_name": "luzfcb/cookiecutter",
"id": "de286527a2c249a7be85445aa0da696648b4855c",
"size": "4993",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_get_user_config.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "3206"
},
{
"name": "Python",
"bytes": "215934"
},
{
"name": "Shell",
"bytes": "161"
}
],
"symlink_target": ""
} |
"""Implement an API wrapper around Ikea Tradfri."""
from .error import (
PyTradFriError, RequestError, ClientError, ServerError, RequestTimeout)
from .gateway import Gateway
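# Explicit public API: star imports from this package pick up only these names.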
__all__ = ['Gateway', 'PyTradFriError', 'RequestError', 'ClientError',
'ServerError', 'RequestTimeout']
| {
"content_hash": "6cab0e89c816ede3ca92c15b89e2c342",
"timestamp": "",
"source": "github",
"line_count": 8,
"max_line_length": 75,
"avg_line_length": 36.875,
"alnum_prop": 0.7152542372881356,
"repo_name": "r41d/pytradfri",
"id": "343ae3a814c48e47d3d2faea5c4766fdf1110f6e",
"size": "295",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pytradfri/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "62079"
},
{
"name": "Shell",
"bytes": "1075"
}
],
"symlink_target": ""
} |