code | repo_name | path | language | license | size
stringlengths 2-1.05M | stringlengths 5-104 | stringlengths 4-251 | stringclasses 1 value | stringclasses 15 values | int32 2-1.05M
---|---|---|---|---|---
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
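# A minimal (illustrative) settings.py entry enabling this pipeline; the
# integer sets the pipeline's run order (0-1000, lower runs first):
#
# ITEM_PIPELINES = {
#     'fundamental.pipelines.FundamentalPipeline': 300,
# }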
class FundamentalPipeline(object):
def process_item(self, item, spider):
return item
| sp500/stock | fundamental/fundamental/pipelines.py | Python | apache-2.0 | 291 |
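# Generates C++ handler declarations and stub definitions for the Dalvik
# unary-op and type-conversion bytecodes (opcodes 0x7B-0x8F) listed below.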
s = \
"""7b: neg-int
7c: not-int
7d: neg-long
7e: not-long
7f: neg-float
80: neg-double
81: int-to-long
82: int-to-float
83: int-to-double
84: long-to-int
85: long-to-float
86: long-to-double
87: float-to-int
88: float-to-long
89: float-to-double
8a: double-to-int
8b: double-to-long
8c: double-to-float
8d: int-to-byte
8e: int-to-char
8f: int-to-short"""
l = s.splitlines()
name_str_list = []
mem_str_list = []
value_str_list = []
for i in l:
ii = i.split(": ")
name = ii[1]
value_str_list.append(ii[0].upper())
mem_str_list.append(name)
name_list = name.split("-")
name_str = ""
for j in name_list:
name_str += (j[0].upper() + j[1:])
name_str_list.append(name_str)
print name_str_list
for name in name_str_list:
print " void On%s(unsigned char reg1, unsigned char reg2);" % name
index = 0
for name in name_str_list:
print ("void BytecodeSegment::On%s(unsigned char reg1,\n" + \
" %s unsigned char reg2)") % \
(name, " " * len(name))
print "{"
print " this->PrintLineNum();"
print " fprintf(this->out_file,"
print " \"%s v%%u, v%%u\\n\"," % (mem_str_list[index], )
print " reg1, "
print " reg2); "
print "}\n"
index += 1
for i in value_str_list:
print " case 0x%s:" % (i, )
print " " * 16 + "byte2 = this->GetNextByte();"
print " " * 16 + "byte3 = byte2 >> 4;"
print " " * 16 + "byte2 &= 0x0F;"
print " " * 16 + "switch(byte1)"
print " " * 16 + "{"
index = 0
for i in value_str_list:
print " " * 20 + "case 0x%s:" % (i, )
print " " * 24 + "this->On%s(byte2, byte3);" % (name_str_list[index],)
print " " * 24 + "break;"
index += 1
| wangziqi2013/Android-Dalvik-Analysis | doc/generate_7b_8f.py | Python | apache-2.0 | 1,736 |
"""
Copyright 2010 Sami Dalouche
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import cherrypy
import logging
from pymager import domain
from pymager import imgengine
from pymager import web
from pymager.web._derivedimagemetadataurldecoder import DerivedImageMetadataUrlDecoder
from pymager.web._derivedimagemetadataurldecoder import UrlDecodingError
from cherrypy.lib.static import serve_file
logger = logging.getLogger("web.derivedresource")
class DerivedResource(object):
exposed = True
def __init__(self, config, image_processor, image_format_mapper):
super(DerivedResource, self).__init__()
self.__config = config
self.__image_processor = image_processor
self._image_format_mapper = image_format_mapper
def __not_found(self):
return cherrypy.NotFound(cherrypy.request.path_info)
#@cherrypy.expose
#def index(self):
# return "Derived Resource!"
#@cherrypy.expose
def GET(self, derived_urisegment):
logger.debug("GET %s" % (derived_urisegment,))
try:
derivedItemUrlDecoder = DerivedImageMetadataUrlDecoder(self._image_format_mapper, derived_urisegment)
except UrlDecodingError:
raise self.__not_found()
else:
try:
request = imgengine.TransformationRequest(
self._image_format_mapper,
derivedItemUrlDecoder.itemid,
(derivedItemUrlDecoder.width, derivedItemUrlDecoder.height),
derivedItemUrlDecoder.format)
except imgengine.ImageFormatNotSupportedException, e:
print e.image_format
            raise cherrypy.HTTPError(status=400, message="The requested image format is invalid: %s" % (e.image_format))
else:
try:
relative_path = self.__image_processor.prepare_transformation(request)
except imgengine.ImageMetadataNotFoundException:
raise self.__not_found()
except imgengine.SecurityCheckException:
raise cherrypy.HTTPError(status=403, message="The requested image transformation is not allowed (%sx%s)" % (derivedItemUrlDecoder.width, derivedItemUrlDecoder.height))
path = os.path.join(self.__config.data_directory, relative_path)
return serve_file(path)
| pymager/pymager | pymager/web/_derivedresource.py | Python | apache-2.0 | 2,955 |
# Copyright 2015, Pinterest, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generic ui-related utilities."""
import pickle
from pinball.config.pinball_config import PinballConfig
from pinball.ui.data import JobData
from pinball.ui.data import Status
from pinball.parser.config_parser import ParserCaller
from pinball.parser.utils import load_parser_with_caller
from pinball.workflow.name import Name
__author__ = 'Pawel Garbacki, Mao Ye'
__copyright__ = 'Copyright 2015, Pinterest, Inc.'
__credits__ = [__author__]
__license__ = 'Apache'
__version__ = '2.0'
def get_workflow_jobs_from_parser_by_web_viewer(workflow):
return get_workflow_jobs_from_parser(workflow, ParserCaller.WEB_VIEWER)
def get_workflow_jobs_from_parser_by_graph_builder(workflow):
return get_workflow_jobs_from_parser(workflow, ParserCaller.GRAPH_BUILDER)
def get_workflow_jobs_from_parser(workflow, parser_caller):
config_parser = load_parser_with_caller(PinballConfig.PARSER,
PinballConfig.PARSER_PARAMS,
parser_caller)
tokens = config_parser.get_workflow_tokens(workflow)
jobs_data = []
for token in tokens:
name = Name.from_job_token_name(token.name)
if name.job:
assert name.workflow == workflow
job = pickle.loads(token.data)
jobs_data.append(JobData(workflow=workflow,
instance=None,
job=name.job,
job_type=job.__class__.__name__,
is_condition=job.IS_CONDITION,
info=job.info(),
inputs=job.inputs,
outputs=job.outputs,
emails=job.emails,
max_attempts=job.max_attempts,
retry_delay_sec=job.retry_delay_sec,
warn_timeout_sec=job.warn_timeout_sec,
abort_timeout_sec=job.abort_timeout_sec,
priority=token.priority,
status=Status.NEVER_RUN))
return jobs_data
| pinterest/pinball | pinball/ui/utils.py | Python | apache-2.0 | 2,834 |
import re
import urllib2
import json
from distutils.version import LooseVersion
import datetime
import os
import requests
import time
from uuid import UUID
GITHUB_TAGS = "https://api.github.com/repos/apache/cassandra/git/refs/tags"
GITHUB_BRANCHES = "https://api.github.com/repos/apache/cassandra/branches"
KNOWN_SERIES = tuple(('no_series',
'daily_regressions_trunk-compaction',
'daily_regressions_trunk-repair_10M',
'daily_regressions_trunk-compaction_stcs',
'daily_regressions_trunk-compaction_dtcs',
'daily_regressions_trunk-compaction_lcs',
'daily_regressions_trunk-commitlog_sync',
))
def get_tagged_releases(series='stable'):
"""Retrieve git tags and find version numbers for a release series
series - 'stable', 'oldstable', or 'testing'"""
releases = []
if series == 'testing':
# Testing releases always have a hyphen after the version number:
tag_regex = re.compile('^refs/tags/cassandra-([0-9]+\.[0-9]+\.[0-9]+-.*$)')
else:
# Stable and oldstable releases are just a number:
tag_regex = re.compile('^refs/tags/cassandra-([0-9]+\.[0-9]+\.[0-9]+$)')
r = urllib2.urlopen(GITHUB_TAGS)
for ref in (i.get('ref', '') for i in json.loads(r.read())):
m = tag_regex.match(ref)
if m:
releases.append(LooseVersion(m.groups()[0]))
# Sort by semver:
releases.sort(reverse=True)
stable_major_version = LooseVersion(str(releases[0].version[0]) + "." + str(releases[0].version[1]))
stable_releases = [release for release in releases if release >= stable_major_version]
oldstable_releases = [release for release in releases if release not in stable_releases]
oldstable_major_version = LooseVersion(str(oldstable_releases[0].version[0]) + "." + str(oldstable_releases[0].version[1]))
oldstable_releases = [release for release in oldstable_releases if release >= oldstable_major_version]
if series == 'testing':
return ['cassandra-' + release.vstring for release in releases]
elif series == 'stable':
return ['cassandra-'+release.vstring for release in stable_releases]
elif series == 'oldstable':
return ['cassandra-'+release.vstring for release in oldstable_releases]
else:
raise AssertionError("unknown release series: {series}".format(series=series))
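# Illustrative results (version numbers are hypothetical):
#   get_tagged_releases('stable')  -> ['cassandra-3.0.2', 'cassandra-3.0.1', ...]
#   get_tagged_releases('testing') -> ['cassandra-3.0.0-rc2', 'cassandra-3.0.0-rc1', ...]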
def get_branches():
"""Retrieve branch names in release sorted order
Does not include trunk.
eg : ['cassandra-3.0','cassandra-2.1','cassandra-2.0','cassandra-1.2']"""
branches = []
branch_regex = re.compile('^cassandra-([0-9]+\.[0-9]+$)')
r = urllib2.urlopen(GITHUB_BRANCHES)
data = json.loads(r.read())
for name in (i.get('name', '') for i in data):
m = branch_regex.match(name)
if m:
branches.append(LooseVersion(m.groups()[0]))
# Sort by semver:
branches.sort(reverse=True)
return ['apache/cassandra-'+b.vstring for b in branches]
def copy_and_update(d1, d2):
    """Return a copy of d1 updated with the entries of d2 (d2 wins on key conflicts)."""
    r = d1.copy()
    r.update(d2)
    return r
def get_shas_from_stats(stats):
"""
Given a stats dictionary, such as would be returned from calling json.loads
on the value returned from the /stats endpoint on a cstar_perf test,
return a dictionary mapping from the revisions to the SHA used for the
test.
"""
revisions_with_git_id = [r for r in stats['revisions'] if
('revision' in r
and 'git_id' in r
and r['git_id'])]
revisions_collapsed_git_id = [copy_and_update(r, {'git_id': (set(r['git_id'].values())
if isinstance(r['git_id'], dict)
else {r['git_id']})})
for r in revisions_with_git_id]
rv = {r['revision']: r['git_id'] for r in revisions_collapsed_git_id}
print 'collapsed to {}'.format(rv)
return rv
def uuid_to_datetime(uid):
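    # 0x01b21dd213814000 is the number of 100-nanosecond intervals between
    # the UUID (Gregorian) epoch, 1582-10-15, and the Unix epoch, 1970-01-01.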
return datetime.datetime.fromtimestamp((uid.get_time() - 0x01b21dd213814000L)*100/1e9)
def uuid_absolute_distance_from_datetime(ref_dt):
def absolute_distance_from_ref_datetime(cmp_uuid):
return abs(ref_dt - uuid_to_datetime(cmp_uuid))
return absolute_distance_from_ref_datetime
def get_cstar_jobs_uuids(cstar_server, series=None):
if series is None:
uuids_file = os.path.join(os.getcwd(), os.path.dirname(__file__), 'all-uuids.txt')
with open(uuids_file) as f:
uuids = list(line.strip() for line in f.readlines())
else:
series_url = '/'.join([cstar_server, 'api', 'series', series,
str(1), str(int(time.time()))])
series_uuids = None
try:
series_uuids = requests.get(series_url)
        except requests.exceptions.ConnectionError as e:
            print "Can't get series uuids: {}".format(e)
            return []
        uuids = json.loads(series_uuids.text)['series']
return uuids
def get_sha_from_build_days_ago(cstar_server, day_deltas, revision):
print 'getting sha from {}'.format(revision)
test_uuids = []
for series in [None] + list(KNOWN_SERIES):
uuids_from_series = get_cstar_jobs_uuids(cstar_server=cstar_server, series=series)
if uuids_from_series:
test_uuids.extend(uuids_from_series)
test_uuids = list(map(UUID, ['{' + u + '}' for u in test_uuids]))
closest_shas = []
for days_ago in day_deltas:
td = datetime.datetime.now() - datetime.timedelta(days=days_ago)
print 'finding sha closest to {}'.format(td)
test_ids_by_distance_asc = list(sorted(test_uuids,
key=uuid_absolute_distance_from_datetime(td)))
for test_id in test_ids_by_distance_asc[:30]:
print 'trying {}'.format(test_id)
stats_url = '/'.join(
[cstar_server, 'tests', 'artifacts', str(test_id), 'stats', 'stats.{}.json'.format(str(test_id))])
try:
stats_json = requests.get(stats_url).text
except requests.exceptions.ConnectionError as e:
print "didn't work :( {}".format(e)
continue
try:
stats_data = json.loads(stats_json)
except ValueError as e:
print "didn't work :( {}".format(e)
continue
shas = get_shas_from_stats(stats_data)
sha_set = shas.get(revision)
if sha_set and len(sha_set) == 1:
sha = next(iter(sha_set))
print ' appending {}'.format(sha)
closest_shas.append(sha)
break
return closest_shas
# when executing this file and not importing it, run the tests
if __name__ == '__main__':
example_data = json.loads("""
{
"revisions": [
{
"env": "",
"git_id": {
"blade-11-6a": "5d38559908cfd8b5ecbad03a1cedb355d7856cee",
"blade-11-7a": "5d38559908cfd8b5ecbad03a1cedb355d7856cee",
"blade-11-8a": "5d38559908cfd8b5ecbad03a1cedb355d7856cee",
"ryan@blade-11-5a": "5d38559908cfd8b5ecbad03a1cedb355d7856cee"
},
"java_home": "~/fab/jvms/jdk1.8.0_45",
"label": null,
"last_log": "16d8514c-67a7-11e5-888c-002590892848",
"options": {
"use_vnodes": true
},
"product": "cassandra",
"revision": "apache/trunk",
"yaml": ""
},
{
"env": "",
"git_id": {
"blade-11-6a": "3bfeba37a6ccde99fba3170cb5eac977a566db30",
"blade-11-7a": "3bfeba37a6ccde99fba3170cb5eac977a566db30",
"blade-11-8a": "3bfeba37a6ccde99fba3170cb5eac977a566db30",
"ryan@blade-11-5a": "3bfeba37a6ccde99fba3170cb5eac977a566db30"
},
"java_home": "~/fab/jvms/jdk1.8.0_45",
"label": null,
"last_log": "042d405e-67aa-11e5-888c-002590892848",
"options": {
"use_vnodes": true
},
"product": "cassandra",
"revision": "tjake/rxjava-3.0",
"yaml": ""
}
],
"stats": [],
"subtitle": "",
"title": "rxjava 3.0 test 1"
}
""")
    assert get_shas_from_stats(example_data) == {
        'apache/trunk': {'5d38559908cfd8b5ecbad03a1cedb355d7856cee'},
        'tjake/rxjava-3.0': {'3bfeba37a6ccde99fba3170cb5eac977a566db30'}
    }
| mambocab/cstar_perf | regression_suites/util.py | Python | apache-2.0 | 8,896 |
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-fixed-ips'
fixed_ips_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_API),
]
def list_rules():
return fixed_ips_policies
| rajalokan/nova | nova/policies/fixed_ips.py | Python | apache-2.0 | 925 |
# Copyright 2015 cybojenix <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from ..base import XdaBase
from ..decorators import handle_error
from ..model.forum import Forum as ForumModel
class Forums(XdaBase):
def __init__(self, xda):
super(Forums, self).__init__(xda)
self.api = handle_error(self.xda.api.forums)
def forums(self):
data = self.api.forums()
data = data.get('results', [])
return (ForumModel(d) for d in data)
def children(self, forum_id):
data = self.api.children(forum_id)
data = data.get('results', [])
return (ForumModel(d) for d in data)
def find_by_device(self, query):
data = self.api.find_by_device(query)
data = data.get('results', [])
return (ForumModel(d) for d in data)
def general(self):
data = self.api.general()
data = data.get('results', [])
return (ForumModel(d) for d in data)
def newest(self):
data = self.api.newest()
data = data.get('results', [])
return (ForumModel(d) for d in data)
def subscribed(self):
data = self.api.subscribed()
data = data.get('results', [])
return (ForumModel(d) for d in data)
def top(self):
data = self.api.top()
data = data.get('results', [])
return (ForumModel(d) for d in data)
# TODO: The ones below return a dictionary with success = true in.
# This should be handled properly when we do the error handling
# implementation later.
def subscribe(self, forum_id):
return self.api.subscribe(forum_id)
def mark_read(self, forum_id):
return self.api.mark_read(forum_id)
def unsubscribe(self, forum_id):
return self.api.unsubscribe(forum_id) | CyboLabs/XdaPy | XdaPy/entry/forums.py | Python | apache-2.0 | 2,357 |
import wdl.parser
import wdl.util
from wdl.types import *
from wdl.values import *
import os
import json
import re
import inspect
class WdlBindingException(Exception): pass
class TaskNotFoundException(Exception): pass
class WdlValueException(Exception): pass
class EvalException(Exception): pass
def scope_hierarchy(scope):
if scope is None: return []
return [scope] + scope_hierarchy(scope.parent)
def fqn_head(fqn):
    """Split a fully-qualified name at the first dot: 'w.c.decl' -> ('w', 'c.decl')."""
    try:
        (h, t) = tuple(fqn.split('.', 1))
    except ValueError:
        (h, t) = (fqn, '')
    return (h, t)
def fqn_tail(fqn):
    """Split a fully-qualified name at the last dot: 'w.c.decl' -> ('w.c', 'decl')."""
    try:
        (h, t) = tuple(fqn.rsplit('.', 1))
    except ValueError:
        (h, t) = (fqn, '')
    return (h, t)
def coerce_inputs(namespace, inputs_dict):
coerced_inputs = {}
for k, v in inputs_dict.items():
decl = namespace.resolve(k)
if decl is None:
raise WdlBindingException("Fully-qualified name '{}' does not resolve to anything".format(k))
if not isinstance(decl, wdl.binding.Declaration):
raise WdlBindingException("Expecting '{}' to resolve to a declaration, got {}".format(k, decl))
value = coerce(v, decl.type)
coerced_inputs[k] = value
return coerced_inputs
class WdlNamespace(object):
def __init__(self, source_location, source_wdl, tasks, workflows, ast):
self.__dict__.update(locals())
self.fully_qualified_name = ''
def resolve(self, fqn):
(name, sub_fqn) = fqn_head(fqn)
for task in self.tasks:
if task.name == name and sub_fqn == '': return task
for workflow in self.workflows:
if workflow.name == name:
if sub_fqn == '': return workflow
else: return workflow.resolve(fqn)
return None
def __str__(self):
return '[WdlNamespace tasks={} workflows={}]'.format(
','.join([t.name for t in self.tasks]),
','.join([w.name for w in self.workflows])
)
class Expression(object):
def __init__(self, ast):
self.__dict__.update(locals())
def eval(self, lookup=lambda var: None, functions=None):
return eval(self.ast, lookup, functions)
def wdl_string(self):
return expr_str(self.ast) if self.ast else str(None)
def __str__(self):
return '[Expression {}]'.format(self.wdl_string())
class Task(object):
def __init__(self, name, declarations, command, outputs, runtime, parameter_meta, meta, ast):
self.__dict__.update(locals())
def __getattr__(self, name):
if name == 'inputs':
return [decl for decl in self.declarations if decl.expression is not None]
def __str__(self):
return '[Task name={}]'.format(self.name)
class Command(object):
def __init__(self, parts, ast):
self.__dict__.update(locals())
def instantiate(self, lookup_function=None, wdl_functions=None):
cmd = []
for part in self.parts:
if isinstance(part, CommandString):
cmd.append(part.string)
elif isinstance(part, CommandExpressionTag):
value = part.expression.eval(lookup_function, wdl_functions)
if isinstance(value, WdlValue) and isinstance(value.type, WdlPrimitiveType):
value = value.as_string()
elif isinstance(value, WdlArray) and isinstance(value.subtype, WdlPrimitiveType) and 'sep' in part.attributes:
value = part.attributes['sep'].join(x.as_string() for x in value.value)
else:
raise EvalException('Could not string-ify part {}: {}'.format(part, value))
cmd.append(value)
return wdl.util.strip_leading_ws(''.join(cmd))
def wdl_string(self):
return wdl.util.strip_leading_ws(''.join([part.wdl_string() for part in self.parts]))
def __str__(self):
return '[Command: {}]'.format(self.wdl_string().replace('\n', '\\n').replace('\r', '\\r'))
class CommandPart: pass
class CommandExpressionTag(CommandPart):
def __init__(self, attributes, expression, ast):
self.__dict__.update(locals())
def wdl_string(self):
attr_string = ', '.join(['{}={}'.format(k, v) for k, v in self.attributes.items()])
return '${' + '{}{}'.format(attr_string, self.expression.wdl_string()) + '}'
def __str__(self):
return '[CommandExpressionTag: {}]'.format(self.wdl_string())
class CommandString(CommandPart):
def __init__(self, string, terminal):
self.__dict__.update(locals())
def wdl_string(self):
return self.string
def __str__(self):
return '[CommandString: {}]'.format(self.string)
# Scope has: body, declarations, parent, prefix, name
class Scope(object):
def __init__(self, name, declarations, body):
self.__dict__.update(locals())
self.parent = None
for element in body:
element.parent = self
def upstream(self): return []
def downstream(self): return []
def __getattr__(self, name):
if name == 'fully_qualified_name':
if self.parent is None:
return self.name
return self.parent.fully_qualified_name + '.' + self.name
def calls(self):
def calls_r(node):
if isinstance(node, Call):
return set([node])
elif isinstance(node, Scope):
call_list = set()
for element in node.body:
call_list.update(calls_r(element))
return call_list
else:
return set()
return calls_r(self)
def scopes(self):
def scopes_r(node):
if isinstance(node, Call):
return set([node])
elif isinstance(node, Scope):
scopes = set([node])
for element in node.body:
scopes.update(scopes_r(element))
return scopes
else:
return set()
return scopes_r(self)
class Workflow(Scope):
def __init__(self, name, declarations, body, ast):
self.__dict__.update(locals())
super(Workflow, self).__init__(name, declarations, body)
def resolve(self, fqn):
def get_r(node, fqn):
(head, decl_name) = fqn_tail(fqn)
if node.fully_qualified_name == fqn:
return node
if isinstance(node, Call) and node.fully_qualified_name == head:
for decl in node.task.declarations:
if decl.name == decl_name:
return decl
for element in node.body:
if isinstance(element, Scope):
sub = get_r(element, fqn)
if sub: return sub
return get_r(self, fqn)
class Call(Scope):
def __init__(self, task, alias, inputs, ast):
self.__dict__.update(locals())
super(Call, self).__init__(alias if alias else task.name, [], [])
def __getattr__(self, name):
if name == 'fully_qualified_name':
parent_fqn = self.parent.fully_qualified_name
parent_fqn = re.sub(r'\._[sw]\d+', '', parent_fqn)
return '{}.{}'.format(parent_fqn, self.name)
def upstream(self):
hierarchy = scope_hierarchy(self)
up = set()
for scope in hierarchy:
if isinstance(scope, Scatter):
up.add(scope)
up.update(scope.upstream())
for expression in self.inputs.values():
for node in wdl.find_asts(expression.ast, "MemberAccess"):
lhs_expr = expr_str(node.attr('lhs'))
parent = self.parent
up_val = None
while parent and not up_val:
fqn = '{}.{}'.format(parent.name, lhs_expr)
up_val = hierarchy[-1].resolve(fqn)
parent = parent.parent
if up_val:
up.add(up_val)
return up
def downstream(self):
root = scope_hierarchy(self)[-1]
down = set()
for scope in root.scopes():
if self in scope.upstream(): down.add(scope)
return down
def get_scatter_parent(self, node=None):
for parent in scope_hierarchy(self):
if isinstance(parent, Scatter):
return parent
def __self__(self):
return '[Call alias={}]'.format(self.alias)
class Declaration(object):
def __init__(self, name, type, expression, ast):
self.__dict__.update(locals())
def __str__(self):
return '[Declaration type={}, name={}, expr={}]'.format(self.type, self.name, expr_str(self.expression))
def wdl_string(self):
return '{} {}{}'.format(self.type.wdl_string(), self.name, ' = {}'.format(self.expression.wdl_string()) if self.expression else '')
class WhileLoop(Scope):
def __init__(self, expression, declarations, body, ast):
self.__dict__.update(locals())
super(WhileLoop, self).__init__('_w' + str(ast.id), declarations, body)
class Scatter(Scope):
def __init__(self, item, collection, declarations, body, ast):
self.__dict__.update(locals())
super(Scatter, self).__init__('_s' + str(ast.id), declarations, body)
def downstream(self):
down = set(self.scopes())
down.discard(self)
return down
def upstream(self):
root = scope_hierarchy(self)[-1]
up = set()
for node in wdl.find_asts(self.collection.ast, "MemberAccess"):
lhs_expr = expr_str(node.attr('lhs'))
parent = self.parent
up_val = None
while parent and not up_val:
fqn = '{}.{}'.format(parent.name, lhs_expr)
up_val = root.resolve(fqn)
parent = parent.parent
if up_val:
up.add(up_val)
return up
class WorkflowOutputs(list):
def __init__(self, arg=[]):
super(WorkflowOutputs, self).__init__(arg)
class WorkflowOutput:
def __init__(self, fqn, wildcard):
self.__dict__.update(locals())
def assign_ids(ast_root, id=0):
if isinstance(ast_root, wdl.parser.AstList):
ast_root.id = id
for index, node in enumerate(ast_root):
assign_ids(node, id+index)
elif isinstance(ast_root, wdl.parser.Ast):
ast_root.id = id
for index, attr in enumerate(ast_root.attributes.values()):
assign_ids(attr, id+index)
elif isinstance(ast_root, wdl.parser.Terminal):
ast_root.id = id
# Binding functions
def parse_namespace(string, resource):
errors = wdl.parser.DefaultSyntaxErrorHandler()
tokens = wdl.parser.lex(string, resource, errors)
ast = wdl.parser.parse(tokens).ast()
assign_ids(ast)
tasks = [parse_task(task_ast) for task_ast in wdl.find_asts(ast, 'Task')]
workflows = [parse_workflow(wf_ast, tasks) for wf_ast in wdl.find_asts(ast, 'Workflow')]
return WdlNamespace(resource, string, tasks, workflows, ast)
def parse_task(ast):
name = ast.attr('name').source_string
declarations = [parse_declaration(d) for d in ast.attr('declarations')]
command_ast = wdl.find_asts(ast, 'RawCommand')
command = parse_command(command_ast[0])
outputs = [parse_output(output_ast) for output_ast in wdl.find_asts(ast, 'Output')]
runtime_asts = wdl.find_asts(ast, 'Runtime')
runtime = parse_runtime(runtime_asts[0]) if len(runtime_asts) else {}
return Task(name, declarations, command, outputs, runtime, {}, {}, ast)
def parse_workflow(ast, tasks):
body = []
declarations = []
name = ast.attr('name').source_string
for body_ast in ast.attr('body'):
if body_ast.name == 'Declaration':
declarations.append(parse_declaration(body_ast))
else:
body.append(parse_body_element(body_ast, tasks))
return Workflow(name, declarations, body, ast)
def parse_runtime(ast):
if not isinstance(ast, wdl.parser.Ast) or ast.name != 'Runtime':
raise WdlBindingException('Expecting a "Runtime" AST')
runtime = {}
for attr in ast.attr('map'):
runtime[attr.attr('key').source_string] = Expression(attr.attr('value'))
return runtime
def parse_declaration(ast):
if not isinstance(ast, wdl.parser.Ast) or ast.name != 'Declaration':
raise WdlBindingException('Expecting a "Declaration" AST')
type = parse_type(ast.attr('type'))
name = ast.attr('name').source_string
expression = Expression(ast.attr('expression')) if ast.attr('expression') else None
return Declaration(name, type, expression, ast)
def parse_body_element(ast, tasks):
if ast.name == 'Call':
return parse_call(ast, tasks)
elif ast.name == 'Workflow':
return parse_workflow(ast, tasks)
elif ast.name == 'WhileLoop':
return parse_while_loop(ast, tasks)
elif ast.name == 'Scatter':
return parse_scatter(ast, tasks)
elif ast.name == 'WorkflowOutputs':
return parse_workflow_outputs(ast)
else:
raise WdlBindingException("unknown ast: " + ast.name)
def parse_while_loop(ast, tasks):
expression = Expression(ast.attr('expression'))
body = []
declarations = []
for body_ast in ast.attr('body'):
if body_ast.name == 'Declaration':
declarations.append(parse_declaration(body_ast))
else:
body.append(parse_body_element(body_ast, tasks))
return WhileLoop(expression, declarations, body, ast)
def parse_scatter(ast, tasks):
collection = Expression(ast.attr('collection'))
item = ast.attr('item').source_string
body = []
declarations = []
for body_ast in ast.attr('body'):
if body_ast.name == 'Declaration':
declarations.append(parse_declaration(body_ast))
else:
body.append(parse_body_element(body_ast, tasks))
return Scatter(item, collection, declarations, body, ast)
def parse_workflow_outputs(ast):
return WorkflowOutputs([parse_workflow_output(x) for x in ast.attr('outputs')])
def parse_workflow_output(ast):
return WorkflowOutput(ast.attr('fqn').source_string, ast.attr('wildcard').source_string if ast.attr('wildcard') else None)
def parse_call(ast, tasks):
if not isinstance(ast, wdl.parser.Ast) or ast.name != 'Call':
raise WdlBindingException('Expecting a "Call" AST')
task_name = ast.attr('task').source_string
alias = ast.attr('alias').source_string if ast.attr('alias') else None
    task = None
    for t in tasks:
        if t.name == task_name:
            task = t
            break
    if task is None:
        raise WdlBindingException('Could not find task with name: ' + task_name)
inputs = {}
try:
for mapping in wdl.find_asts(ast, 'Inputs')[0].attr('map'):
inputs[mapping.attr('key').source_string] = Expression(mapping.attr('value'))
except IndexError:
pass
return Call(task, alias, inputs, ast)
def parse_command_line_expr_attrs(ast):
attrs = {}
for x in ast:
attrs[x.attr('key').source_string] = x.attr('value').source_string
return attrs
def parse_command_line_expr(ast):
if not isinstance(ast, wdl.parser.Ast) or ast.name != 'CommandParameter':
raise WdlBindingException('Expecting a "CommandParameter" AST')
return CommandExpressionTag(
parse_command_line_expr_attrs(ast.attr('attributes')),
Expression(ast.attr('expr')),
ast
)
def parse_command(ast):
if not isinstance(ast, wdl.parser.Ast) or ast.name != 'RawCommand':
raise WdlBindingException('Expecting a "RawCommand" AST')
parts = []
for node in ast.attr('parts'):
if isinstance(node, wdl.parser.Terminal):
parts.append(CommandString(node.source_string, node))
if isinstance(node, wdl.parser.Ast) and node.name == 'CommandParameter':
parts.append(parse_command_line_expr(node))
return Command(parts, ast)
def parse_output(ast):
if not isinstance(ast, wdl.parser.Ast) or ast.name != 'Output':
raise WdlBindingException('Expecting an "Output" AST')
type = parse_type(ast.attr('type'))
var = ast.attr('var').source_string
expression = Expression(ast.attr('expression'))
return Declaration(var, type, expression, ast)
def parse_type(ast):
if isinstance(ast, wdl.parser.Terminal):
if ast.str != 'type':
            raise WdlBindingException('Expecting a "Type" AST')
if ast.source_string == 'Int': return WdlIntegerType()
elif ast.source_string == 'Boolean': return WdlBooleanType()
elif ast.source_string == 'Float': return WdlFloatType()
elif ast.source_string == 'String': return WdlStringType()
elif ast.source_string == 'File': return WdlFileType()
elif ast.source_string == 'Uri': return WdlUriType()
else: raise WdlBindingException("Unsupported Type: {}".format(ast.source_string))
elif isinstance(ast, wdl.parser.Ast) and ast.name == 'Type':
name = ast.attr('name').source_string
if name == 'Array':
subtypes = ast.attr('subtype')
if len(subtypes) != 1:
raise WdlBindingException("Expecting only one subtype AST")
return WdlArrayType(parse_type(subtypes[0]))
if name == 'Map':
subtypes = ast.attr('subtype')
if len(subtypes) != 2:
raise WdlBindingException("Expecting only two subtype AST")
return WdlMapType(parse_type(subtypes[0]), parse_type(subtypes[1]))
else:
        raise WdlBindingException('Expecting a "Type" AST')
def python_to_wdl_value(py_value, wdl_type):
if isinstance(wdl_type, WdlStringType):
return WdlString(py_value)
if isinstance(wdl_type, WdlIntegerType):
return WdlInteger(py_value)
if isinstance(wdl_type, WdlFloatType):
return WdlFloat(py_value)
if isinstance(wdl_type, WdlBooleanType):
return WdlBoolean(py_value)
if isinstance(wdl_type, WdlFileType):
return WdlFile(py_value)
if isinstance(wdl_type, WdlUriType):
return WdlUri(py_value)
if isinstance(wdl_type, WdlArrayType):
if not isinstance(py_value, list):
raise WdlValueException("{} must be constructed from Python list, got {}".format(wdl_type, py_value))
members = [python_to_wdl_value(x, wdl_type.subtype) for x in py_value]
return WdlArray(wdl_type.subtype, members)
    if isinstance(wdl_type, WdlMapType):
        if not isinstance(py_value, dict):
            raise WdlValueException("{} must be constructed from Python dict, got {}".format(wdl_type, py_value))
        # NOTE: assumes WdlMapType exposes key_type/value_type and WdlMap takes
        # (key_type, value_type, dict), matching the WdlMap usage in read_map() below.
        members = {python_to_wdl_value(k, wdl_type.key_type): python_to_wdl_value(v, wdl_type.value_type)
                   for k, v in py_value.items()}
        return WdlMap(wdl_type.key_type, wdl_type.value_type, members)
binary_operators = [
'Add', 'Subtract', 'Multiply', 'Divide', 'Remainder', 'Equals',
'NotEquals', 'LessThan', 'LessThanOrEqual', 'GreaterThan',
'GreaterThanOrEqual', 'LogicalAnd', 'LogicalOr'
]
unary_operators = [
'LogicalNot', 'UnaryPlus', 'UnaryNegation'
]
class WdlStandardLibraryFunctions:
# read a path, return its contents as a string
def read_file(self, wdl_file):
if os.path.exists(wdl_file.value):
with open(wdl_file.value) as fp:
return fp.read()
else:
raise EvalException('Path {} does not exist'.format(wdl_file.value))
def tsv(self, wdl_file):
lines = self.read_file(wdl_file).split('\n')
return [line.split('\t') for line in lines]
def single_param(self, params):
if len(params) == 1: return params[0]
else: raise EvalException('Expecting a single parameter, got: {}'.format(params))
def call(self, func_name, params):
# perhaps predicate=inspect.ismethod?
methods = dict(inspect.getmembers(self.__class__))
return methods[func_name](self, params)
def stdout(self, params): raise EvalException('stdout() not implemented')
def stderr(self, params): raise EvalException('stderr() not implemented')
def read_lines(self, params):
return WdlArray(WdlStringType(), [WdlString(x) for x in self.read_file(self.single_param(params)).split('\n')])
def read_string(self, params):
return WdlString(self.read_file(self.single_param(params)))
def read_int(self, params):
return WdlInteger(int(self.read_file(self.single_param(params))))
    def read_map(self, params):
        rows = self.tsv(self.single_param(params))
        if not all([len(row) == 2 for row in rows]):
            raise EvalException('read_map() expects the file {} to be a 2-column TSV'.format(self.single_param(params)))
        return WdlMap(WdlStringType(), WdlStringType(), {
            WdlString(row[0]): WdlString(row[1]) for row in rows
        })
    def read_tsv(self, params):
        return self.tsv(self.single_param(params))
def interpolate(string, lookup, functions):
    # evaluate each embedded ${...} expression and splice its value into the string
for expr_string in re.findall(r'\$\{.*?\}', string):
expr = wdl.parse_expr(expr_string[2:-1])
value = expr.eval(lookup, functions)
string = string.replace(expr_string, value.as_string())
return string
def eval(ast, lookup=lambda var: None, functions=None):
if isinstance(ast, Expression):
return eval(ast.ast, lookup, functions)
if isinstance(ast, wdl.parser.Terminal):
if ast.str == 'integer':
return WdlInteger(int(ast.source_string))
elif ast.str == 'float':
return WdlFloat(float(ast.source_string))
elif ast.str == 'string':
return WdlString(interpolate(ast.source_string, lookup, functions))
elif ast.str == 'boolean':
return WdlBoolean(True if ast.source_string == 'true' else False)
elif ast.str == 'identifier':
symbol = lookup(ast.source_string)
if symbol is None:
return WdlUndefined()
return symbol
elif isinstance(ast, wdl.parser.Ast):
if ast.name in binary_operators:
lhs = eval(ast.attr('lhs'), lookup, functions)
if isinstance(lhs, WdlUndefined): return lhs
rhs = eval(ast.attr('rhs'), lookup, functions)
if isinstance(rhs, WdlUndefined): return rhs
if ast.name == 'Add': return lhs.add(rhs)
if ast.name == 'Subtract': return lhs.subtract(rhs)
if ast.name == 'Multiply': return lhs.multiply(rhs)
if ast.name == 'Divide': return lhs.divide(rhs)
if ast.name == 'Remainder': return lhs.mod(rhs)
if ast.name == 'Equals': return lhs.equal(rhs)
if ast.name == 'NotEquals': return lhs.not_equal(rhs)
if ast.name == 'LessThan': return lhs.less_than(rhs)
if ast.name == 'LessThanOrEqual': return lhs.less_than_or_equal(rhs)
if ast.name == 'GreaterThan': return lhs.greater_than(rhs)
if ast.name == 'GreaterThanOrEqual': return lhs.greater_than_or_equal(rhs)
if ast.name == 'LogicalAnd': return lhs.logical_and(rhs)
if ast.name == 'LogicalOr': return lhs.logical_or(rhs)
if ast.name in unary_operators:
expr = eval(ast.attr('expression'), lookup, functions)
if isinstance(expr, WdlUndefined): return expr
if ast.name == 'LogicalNot': return expr.logical_not()
if ast.name == 'UnaryPlus': return expr.unary_plus()
if ast.name == 'UnaryNegation': return expr.unary_negation()
if ast.name == 'ObjectLiteral':
obj = WdlObject()
for member in ast.attr('map'):
key = member.attr('key').source_string
value = eval(member.attr('value'), lookup, functions)
if value is None or isinstance(value, WdlUndefined):
raise EvalException('Cannot evaluate expression')
obj.set(key, value)
return obj
if ast.name == 'ArrayLiteral':
values = [eval(x, lookup, functions) for x in ast.attr('values')]
return WdlArray(values[0].type, values)
if ast.name == 'ArrayOrMapLookup':
array_or_map = eval(ast.attr('lhs'), lookup, functions)
index = eval(ast.attr('rhs'), lookup, functions)
if isinstance(array_or_map, WdlArray) and isinstance(index, WdlInteger):
return array_or_map.value[index.value]
elif isinstance(array_or_map, WdlArray):
raise EvalException('Cannot index array {} with {}'.format(array_or_map, index))
elif isinstance(array_or_map, WdlMap) and isinstance(index.type, WdlPrimitiveType):
return array_or_map.value[index]
raise EvalException('ArrayOrMapLookup not implemented yet')
if ast.name == 'MemberAccess':
object = eval(ast.attr('lhs'), lookup, functions)
member = ast.attr('rhs')
if isinstance(object, WdlUndefined):
return object
if not isinstance(member, wdl.parser.Terminal) or member.str != 'identifier':
# TODO: maybe enforce this in the grammar?
raise EvalException('rhs needs to be an identifier')
member = member.source_string
return object.get(member)
if ast.name == 'FunctionCall':
function = ast.attr('name').source_string
parameters = [eval(x, lookup, functions) for x in ast.attr('params')]
if isinstance(functions, WdlStandardLibraryFunctions):
return functions.call(function, parameters)
else:
raise EvalException('No functions defined')
def expr_str(ast):
if isinstance(ast, wdl.parser.Terminal):
if ast.str == 'string':
return '"{}"'.format(ast.source_string)
return ast.source_string
elif isinstance(ast, wdl.parser.Ast):
if ast.name == 'Add':
return '{}+{}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'Subtract':
return '{}-{}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'Multiply':
return '{}*{}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'Divide':
return '{}/{}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'Remainder':
return '{}%{}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'Equals':
return '{}=={}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'NotEquals':
return '{}!={}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'LessThan':
return '{}<{}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'LessThanOrEqual':
return '{}<={}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'GreaterThan':
return '{}>{}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'GreaterThanOrEqual':
return '{}>={}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'LogicalNot':
return '!{}'.format(expr_str(ast.attr('expression')))
if ast.name == 'UnaryPlus':
return '+{}'.format(expr_str(ast.attr('expression')))
if ast.name == 'UnaryNegation':
return '-{}'.format(expr_str(ast.attr('expression')))
if ast.name == 'FunctionCall':
return '{}({})'.format(expr_str(ast.attr('name')), ','.join([expr_str(param) for param in ast.attr('params')]))
if ast.name == 'ArrayIndex':
return '{}[{}]'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'MemberAccess':
return '{}.{}'.format(expr_str(ast.attr('lhs')), expr_str(ast.attr('rhs')))
if ast.name == 'ArrayLiteral':
return '[{}]'.format(', '.join(expr_str(x) for x in ast.attr('values')))
| broadinstitute/pywdl | wdl/binding.py | Python | apache-2.0 | 28,103 |
import tensorflow as tf
v1 = tf.Variable(0, dtype=tf.float32)  # define a variable with initial value 0
step = tf.Variable(0, trainable=False)  # step counts training iterations and controls the decay rate
ema = tf.train.ExponentialMovingAverage(0.99, step)  # set the base decay rate to 0.99
maintain_averages_op = ema.apply([v1])  # op that updates the moving averages of the listed variables
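# With num_updates=step, the effective decay used by TensorFlow is
# min(0.99, (1 + step) / (10 + step)): fast averaging early on (decay 0.1
# at step=0) and slow averaging later (decay 0.99 once step is large).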
with tf.Session() as sess:
    init_op = tf.global_variables_initializer()  # initialize all variables
sess.run(init_op)
    print(sess.run([v1, ema.average(v1)]))  # print the initial value of v1 and its moving average
    sess.run(tf.assign(v1, 5))  # update the value of v1
    sess.run(maintain_averages_op)  # update the moving average of v1
print(sess.run([v1, ema.average(v1)]))
    sess.run(tf.assign(step, 10000))  # update the iteration counter step
sess.run(tf.assign(v1, 10))
sess.run(maintain_averages_op)
print(sess.run([v1, ema.average(v1)]))
    # update the moving average again
sess.run(maintain_averages_op)
print(sess.run([v1, ema.average(v1)]))
    # update the value of v1 to 15
sess.run(tf.assign(v1, 15))
sess.run(maintain_averages_op)
print(sess.run([v1, ema.average(v1)]))
#
# [0.0, 0.0]
# [5.0, 4.5]
# [10.0, 4.5549998]
# [10.0, 4.6094499]
# [15.0, 4.7133551]
"""注意,使用了这个函数后会生成一个新的影子变量"""
a = tf.global_variables()
for ele in a:
print(ele.name)
# Variable:0
# Variable_1:0
# Variable/ExponentialMovingAverage:0  (the newly added shadow variable)
| Asurada2015/TFAPI_translation | Training/Moving Averages/tf_train_ExponentialMovingAverage.py | Python | apache-2.0 | 1,401 |
import numpy as np
import random
from ray.rllib.utils import try_import_tf
tf = try_import_tf()
def seed(np_seed=0, random_seed=0, tf_seed=0):
np.random.seed(np_seed)
random.seed(random_seed)
tf.set_random_seed(tf_seed)
| stephanie-wang/ray | rllib/utils/seed.py | Python | apache-2.0 | 235 |
"""
Helpers for Zigbee Home Automation.
For more details about this component, please refer to the documentation at
https://home-assistant.io/integrations/zha/
"""
import collections
import logging
import zigpy.types
from homeassistant.core import callback
from .const import (
ATTR_NAME,
CLUSTER_TYPE_IN,
CLUSTER_TYPE_OUT,
DATA_ZHA,
DATA_ZHA_GATEWAY,
DOMAIN,
)
from .registries import BINDABLE_CLUSTERS
_LOGGER = logging.getLogger(__name__)
ClusterPair = collections.namedtuple("ClusterPair", "source_cluster target_cluster")
async def safe_read(
cluster, attributes, allow_cache=True, only_cache=False, manufacturer=None
):
"""Swallow all exceptions from network read.
If we throw during initialization, setup fails. Rather have an entity that
exists, but is in a maybe wrong state, than no entity. This method should
probably only be used during initialization.
"""
try:
result, _ = await cluster.read_attributes(
attributes,
allow_cache=allow_cache,
only_cache=only_cache,
manufacturer=manufacturer,
)
return result
except Exception: # pylint: disable=broad-except
return {}
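# Illustrative usage during entity setup (the cluster object and attribute
# names are hypothetical):
#
# result = await safe_read(cluster, ["manufacturer", "model"])
# manufacturer = result.get("manufacturer")  # result is {} on any failure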
def get_attr_id_by_name(cluster, attr_name):
"""Get the attribute id for a cluster attribute by its name."""
return next(
(
attrid
for attrid, (attrname, datatype) in cluster.attributes.items()
if attr_name == attrname
),
None,
)
async def get_matched_clusters(source_zha_device, target_zha_device):
"""Get matched input/output cluster pairs for 2 devices."""
source_clusters = source_zha_device.async_get_std_clusters()
target_clusters = target_zha_device.async_get_std_clusters()
clusters_to_bind = []
for endpoint_id in source_clusters:
for cluster_id in source_clusters[endpoint_id][CLUSTER_TYPE_OUT]:
if cluster_id not in BINDABLE_CLUSTERS:
continue
for t_endpoint_id in target_clusters:
if cluster_id in target_clusters[t_endpoint_id][CLUSTER_TYPE_IN]:
cluster_pair = ClusterPair(
source_cluster=source_clusters[endpoint_id][CLUSTER_TYPE_OUT][
cluster_id
],
target_cluster=target_clusters[t_endpoint_id][CLUSTER_TYPE_IN][
cluster_id
],
)
clusters_to_bind.append(cluster_pair)
return clusters_to_bind
@callback
def async_is_bindable_target(source_zha_device, target_zha_device):
"""Determine if target is bindable to source."""
source_clusters = source_zha_device.async_get_std_clusters()
target_clusters = target_zha_device.async_get_std_clusters()
for endpoint_id in source_clusters:
for t_endpoint_id in target_clusters:
matches = set(
source_clusters[endpoint_id][CLUSTER_TYPE_OUT].keys()
).intersection(target_clusters[t_endpoint_id][CLUSTER_TYPE_IN].keys())
if any(bindable in BINDABLE_CLUSTERS for bindable in matches):
return True
return False
async def async_get_zha_device(hass, device_id):
"""Get a ZHA device for the given device registry id."""
device_registry = await hass.helpers.device_registry.async_get_registry()
registry_device = device_registry.async_get(device_id)
zha_gateway = hass.data[DATA_ZHA][DATA_ZHA_GATEWAY]
ieee_address = list(list(registry_device.identifiers)[0])[1]
ieee = zigpy.types.EUI64.convert(ieee_address)
return zha_gateway.devices[ieee]
class LogMixin:
"""Log helper."""
def log(self, level, msg, *args):
"""Log with level."""
raise NotImplementedError
def debug(self, msg, *args):
"""Debug level log."""
return self.log(logging.DEBUG, msg, *args)
def info(self, msg, *args):
"""Info level log."""
return self.log(logging.INFO, msg, *args)
def warning(self, msg, *args):
"""Warning method log."""
return self.log(logging.WARNING, msg, *args)
def error(self, msg, *args):
"""Error level log."""
return self.log(logging.ERROR, msg, *args)
@callback
def async_get_device_info(hass, device, ha_device_registry=None):
"""Get ZHA device."""
zha_gateway = hass.data[DATA_ZHA][DATA_ZHA_GATEWAY]
ret_device = {}
ret_device.update(device.device_info)
ret_device["entities"] = [
{
"entity_id": entity_ref.reference_id,
ATTR_NAME: entity_ref.device_info[ATTR_NAME],
}
for entity_ref in zha_gateway.device_registry[device.ieee]
]
if ha_device_registry is not None:
reg_device = ha_device_registry.async_get_device(
{(DOMAIN, str(device.ieee))}, set()
)
if reg_device is not None:
ret_device["user_given_name"] = reg_device.name_by_user
ret_device["device_reg_id"] = reg_device.id
ret_device["area_id"] = reg_device.area_id
return ret_device
| leppa/home-assistant | homeassistant/components/zha/core/helpers.py | Python | apache-2.0 | 5,173 |
# -*- test-case-name: txweb2.dav.test.test_put -*-
##
# Copyright (c) 2005-2017 Apple Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# DRI: Wilfredo Sanchez, [email protected]
##
"""
WebDAV PUT method
"""
__all__ = ["preconditions_PUT", "http_PUT"]
from twisted.internet.defer import deferredGenerator, waitForDeferred
from twext.python.log import Logger
from txweb2 import responsecode
from txweb2.http import HTTPError, StatusResponse
from txdav.xml import element as davxml
from txweb2.dav.method import put_common
from txweb2.dav.util import parentForURL
log = Logger()
def preconditions_PUT(self, request):
#
# Check authentication and access controls
#
if self.exists():
x = waitForDeferred(self.authorize(request, (davxml.WriteContent(),)))
yield x
x.getResult()
else:
parent = waitForDeferred(request.locateResource(parentForURL(request.uri)))
yield parent
parent = parent.getResult()
if not parent.exists():
raise HTTPError(
StatusResponse(
responsecode.CONFLICT,
"cannot PUT to non-existent parent"))
x = waitForDeferred(parent.authorize(request, (davxml.Bind(),)))
yield x
x.getResult()
#
# HTTP/1.1 (RFC 2068, section 9.6) requires that we respond with a Not
# Implemented error if we get a Content-* header which we don't
# recognize and handle properly.
#
for header, _ignore_value in request.headers.getAllRawHeaders():
if header.startswith("Content-") and header not in (
# "Content-Base", # Doesn't make sense in PUT?
# "Content-Encoding", # Requires that we decode it?
"Content-Language",
"Content-Length",
# "Content-Location", # Doesn't make sense in PUT?
"Content-MD5",
# "Content-Range", # FIXME: Need to implement this
"Content-Type",
):
log.error("Client sent unrecognized content header in PUT request: %s"
% (header,))
raise HTTPError(StatusResponse(
responsecode.NOT_IMPLEMENTED,
"Unrecognized content header %r in request." % (header,)
))
preconditions_PUT = deferredGenerator(preconditions_PUT)
def http_PUT(self, request):
"""
Respond to a PUT request. (RFC 2518, section 8.7)
"""
log.info("Writing request stream to %s" % (self,))
#
# Don't pass in the request URI, since PUT isn't specified to be able
# to return a MULTI_STATUS response, which is WebDAV-specific (and PUT is
# not).
#
# return put(request.stream, self.fp)
return put_common.storeResource(request, destination=self, destination_uri=request.uri)
| macosforge/ccs-calendarserver | txweb2/dav/method/put.py | Python | apache-2.0 | 3,841 |
#!/usr/bin/env python
#coding=utf-8
'''
Override configurations.
'''
configs = {
'db': {
'host': '127.0.0.1'
}
} | shuchangwen/awesome-python-webapp | www/config_override.py | Python | apache-2.0 | 130 |
"""Defines utility functions for executing commands on the command line."""
import logging
import subprocess
import re
from util.exceptions import UnbalancedBrackets
logger = logging.getLogger(__name__)
class CommandError(Exception):
def __init__(self, msg, returncode=None):
super(CommandError, self).__init__(msg)
self.returncode = returncode
def execute_command_line(cmd_list):
"""Executes the given command list on the command line
:param cmd_list: The list of commands
:type cmd_list: []
"""
logger.debug('Executing: %s', ' '.join(cmd_list))
try:
subprocess.check_output(cmd_list, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as ex:
raise CommandError('Exit code %i: %s' % (ex.returncode, ex.output), ex.returncode)
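# Illustrative usage:
#
# try:
#     execute_command_line(['tar', '-xzf', 'archive.tar.gz'])
# except CommandError as ex:
#     logger.error('Command failed with exit code %s', ex.returncode)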
def environment_expansion(env_map, cmd_string, remove_extras=False):
"""Performs environment variable expansion into command string
The original preference was to use bash directly, eliminating the need for us to maintain
regular expressions to mimic bash expansion logic. Unfortunately, the overhead of instantiating
a sub-shell was prohibitively high on a large scale.
We are instead handling merely a subset of expansion options:
$VAR
${VAR}
${VAR/#/PREFIX}
WARNING: Resulting string should be treated as sensitive, due to the possibility
of secrets being injected.
:param env_map: map of environment variables to their values
:type env_map: dict
:param cmd_string: string to inject environment variables into
:type cmd_string: str
:param remove_extras: whether to remove extra parameters that do not have a value
:type remove_extras: bool
:return: string with parameters expanded
:rtype: str
:raises :class:`util.exceptions.UnbalancedBrackets`: if brackets are not balanced in cmd_string
"""
# inline function to capture
def dict_lookup(match):
prefix = None
value = match.group(0)
key = match.group(1)
key = key.lstrip('{').rstrip('}')
# Handle special case for prefixed expansion
if '/#/' in key:
key, sep, prefix = key.split('/')
if key in env_map:
value = env_map[key]
# If a prefix was found, insert at beginning of returned value
if prefix:
value = prefix + value
elif remove_extras:
value = ''
# Cast to str as replacement could potentially be a non-string value
return str(value)
if cmd_string.count('{') != cmd_string.count('}'):
raise UnbalancedBrackets
expanded_str = re.sub(r'\$(\w+|\{[^}]*\})', dict_lookup, cmd_string)
# Remove any extra whitespace in the command arguments
expanded_str = ' '.join(expanded_str.split())
return expanded_str
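# Illustrative expansion (variable names and values are hypothetical; note
# that the PREFIX in ${VAR/#/PREFIX} must not itself contain '/'):
#
# env_map = {'FLAGS': '-v', 'INPUT_DIR': '/data/in'}
# environment_expansion(env_map, 'run.sh $FLAGS ${INPUT_DIR/#/-i}')
# -> 'run.sh -v -i/data/in'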
| ngageoint/scale | scale/util/command.py | Python | apache-2.0 | 2,852 |
from aiida import load_dbenv
load_dbenv()
from aiida.orm import Code, DataFactory, load_node
StructureData = DataFactory('structure')
ParameterData = DataFactory('parameter')
import numpy as np
import os
codename = 'phono3py@boston-lab'
code = Code.get_from_string(codename)
###############
# Nitrogen structure
a = 5.7906907399999996
cell = [[a, 0, 0],
[0, a, 0],
[0, 0, a]]
symbols=['N'] * 8
scaled_positions = [(0.5554032500000000, 0.5554032500000000, 0.5554032500000000),
(0.9445967500000000, 0.4445967500000000, 0.0554032500000000),
(0.0554032500000000, 0.9445967500000000, 0.4445967500000000),
(0.4445967500000000, 0.0554032500000000, 0.9445967500000000),
(0.4445967500000000, 0.4445967500000000, 0.4445967500000000),
(0.0554032500000000, 0.5554032500000000, 0.9445967500000000),
(0.9445967500000000, 0.0554032500000000, 0.5554032500000000),
(0.5554032500000000, 0.9445967500000000, 0.0554032500000000)]
##############
structure = StructureData(cell=cell)
positions = np.dot(scaled_positions, cell)
for i, scaled_position in enumerate(scaled_positions):
structure.append_atom(position=np.dot(scaled_position, cell).tolist(),
symbols=symbols[i])
# Machine
machine_dict = {'resources': {'num_machines': 1,
'parallel_env': 'mpi*',
'tot_num_mpiprocs': 16},
'max_wallclock_seconds': 3600 * 10,
}
# PHONOPY settings
parameters = ParameterData(dict={'supercell': [[2, 0, 0],
[0, 2, 0],
[0, 0, 2]],
'primitive': [[0.0, 1.0, 1.0],
[1.0, 0.0, 1.0],
[1.0, 1.0, 0.0]],
'distance': 0.01,
'mesh': [20, 20, 20],
'symmetry_precision': 1e-5,
})
calc = code.new_calc(max_wallclock_seconds=3600,
resources={"num_machines": 1,
"parallel_env":"mpi*",
"tot_num_mpiprocs": 6})
calc.label = "test phono3py calculation"
calc.description = "A much longer description"
calc.use_structure(structure)
calc.use_code(code)
calc.use_parameters(parameters)
# Chose to use forces or force constants
if True:
calc.use_data_sets(load_node(47616)) # This node should contain a ForceSetsData object
else:
calc.use_force_constants(load_node(62098))
calc.use_force_constants_3(load_node(62097))
if False:
subfolder, script_filename = calc.submit_test()
print "Test_submit for calculation (uuid='{}')".format(calc.uuid)
print "Submit file in {}".format(os.path.join(
os.path.relpath(subfolder.abspath),
script_filename))
else:
calc.store_all()
print "created calculation with PK={}".format(calc.pk)
calc.submit()
| yw-fang/readingnotes | abinitio/aiida/aiida_plugins_notes_examples/aiida-phonopy/FANG-examples/n/plugin/launch_phono3py_n-boston.py | Python | apache-2.0 | 3,189 |
'''
A worker used for testing how other components handle background tasks.
This worker has three functions: one that sleeps without reporting progress,
one that sleeps and emits a progress update every second, and one that
sleeps and then raises an exception.
This also serves as a good template for copy/paste when creating a new worker.
'''
import math
import time
import worker
def sleep_determinate(period):
''' Sleep for a specified period of time with progress updates.'''
total_sleep = 0
worker.start_job(total=int(math.ceil(period)))
while total_sleep < period:
time.sleep(1)
total_sleep += 1
worker.update_job(current=total_sleep)
worker.finish_job()
def sleep_exception(period):
''' Sleep for a specified period then raise an exception.'''
worker.start_job()
time.sleep(period)
raise ValueError('sleep_exception() is deliberately raising an exception.')
def sleep_indeterminate(period):
''' Sleep for a specified period of time with no progress updates.'''
worker.start_job()
time.sleep(period)
worker.finish_job()
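# A template (illustrative) for a new worker function, using the same
# worker-module calls as the functions above:
#
# def my_task(n_steps):
#     worker.start_job(total=n_steps)  # omit 'total' for indeterminate jobs
#     for i in range(n_steps):
#         do_one_step()                    # hypothetical unit of work
#         worker.update_job(current=i + 1)
#     worker.finish_job()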
| TeamHG-Memex/hgprofiler | lib/worker/sleep.py | Python | apache-2.0 | 1,072 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
import six
from heat.common import exception
from heat.engine.cfn import functions as cfn_funcs
from heat.engine import constraints
from heat.engine.hot import parameters as hot_param
from heat.engine import parameters
from heat.engine import plugin_manager
from heat.engine import properties
from heat.engine import resources
from heat.engine import support
from heat.tests import common
class PropertySchemaTest(common.HeatTestCase):
def test_schema_all(self):
d = {
'type': 'string',
'description': 'A string',
'default': 'wibble',
'required': False,
'update_allowed': False,
'immutable': False,
'constraints': [
{'length': {'min': 4, 'max': 8}},
]
}
s = properties.Schema(properties.Schema.STRING, 'A string',
default='wibble',
constraints=[constraints.Length(4, 8)])
self.assertEqual(d, dict(s))
def test_schema_list_schema(self):
d = {
'type': 'list',
'description': 'A list',
'schema': {
'*': {
'type': 'string',
'description': 'A string',
'default': 'wibble',
'required': False,
'update_allowed': False,
'immutable': False,
'constraints': [
{'length': {'min': 4, 'max': 8}},
]
}
},
'required': False,
'update_allowed': False,
'immutable': False,
}
s = properties.Schema(properties.Schema.STRING, 'A string',
default='wibble',
constraints=[constraints.Length(4, 8)])
l = properties.Schema(properties.Schema.LIST, 'A list', schema=s)
self.assertEqual(d, dict(l))
def test_schema_map_schema(self):
d = {
'type': 'map',
'description': 'A map',
'schema': {
'Foo': {
'type': 'string',
'description': 'A string',
'default': 'wibble',
'required': False,
'update_allowed': False,
'immutable': False,
'constraints': [
{'length': {'min': 4, 'max': 8}},
]
}
},
'required': False,
'update_allowed': False,
'immutable': False,
}
s = properties.Schema(properties.Schema.STRING, 'A string',
default='wibble',
constraints=[constraints.Length(4, 8)])
m = properties.Schema(properties.Schema.MAP, 'A map',
schema={'Foo': s})
self.assertEqual(d, dict(m))
def test_schema_nested_schema(self):
d = {
'type': 'list',
'description': 'A list',
'schema': {
'*': {
'type': 'map',
'description': 'A map',
'schema': {
'Foo': {
'type': 'string',
'description': 'A string',
'default': 'wibble',
'required': False,
'update_allowed': False,
'immutable': False,
'constraints': [
{'length': {'min': 4, 'max': 8}},
]
}
},
'required': False,
'update_allowed': False,
'immutable': False,
}
},
'required': False,
'update_allowed': False,
'immutable': False,
}
s = properties.Schema(properties.Schema.STRING, 'A string',
default='wibble',
constraints=[constraints.Length(4, 8)])
m = properties.Schema(properties.Schema.MAP, 'A map',
schema={'Foo': s})
l = properties.Schema(properties.Schema.LIST, 'A list', schema=m)
self.assertEqual(d, dict(l))
def test_all_resource_schemata(self):
for resource_type in resources.global_env().get_types():
for schema in six.itervalues(getattr(resource_type,
'properties_schema',
{})):
properties.Schema.from_legacy(schema)
def test_from_legacy_idempotency(self):
s = properties.Schema(properties.Schema.STRING)
self.assertTrue(properties.Schema.from_legacy(s) is s)
def test_from_legacy_minimal_string(self):
s = properties.Schema.from_legacy({
'Type': 'String',
})
self.assertEqual(properties.Schema.STRING, s.type)
self.assertIsNone(s.description)
self.assertIsNone(s.default)
self.assertFalse(s.required)
self.assertEqual(0, len(s.constraints))
def test_from_legacy_string(self):
s = properties.Schema.from_legacy({
'Type': 'String',
'Description': 'a string',
'Default': 'wibble',
'Implemented': False,
'MinLength': 4,
'MaxLength': 8,
'AllowedValues': ['blarg', 'wibble'],
'AllowedPattern': '[a-z]*',
})
self.assertEqual(properties.Schema.STRING, s.type)
self.assertEqual('a string', s.description)
self.assertEqual('wibble', s.default)
self.assertFalse(s.required)
self.assertEqual(3, len(s.constraints))
self.assertFalse(s.immutable)
def test_from_legacy_min_length(self):
s = properties.Schema.from_legacy({
'Type': 'String',
'MinLength': 4,
})
self.assertEqual(1, len(s.constraints))
c = s.constraints[0]
self.assertIsInstance(c, constraints.Length)
self.assertEqual(4, c.min)
self.assertIsNone(c.max)
def test_from_legacy_max_length(self):
s = properties.Schema.from_legacy({
'Type': 'String',
'MaxLength': 8,
})
self.assertEqual(1, len(s.constraints))
c = s.constraints[0]
self.assertIsInstance(c, constraints.Length)
self.assertIsNone(c.min)
self.assertEqual(8, c.max)
def test_from_legacy_minmax_length(self):
s = properties.Schema.from_legacy({
'Type': 'String',
'MinLength': 4,
'MaxLength': 8,
})
self.assertEqual(1, len(s.constraints))
c = s.constraints[0]
self.assertIsInstance(c, constraints.Length)
self.assertEqual(4, c.min)
self.assertEqual(8, c.max)
def test_from_legacy_minmax_string_length(self):
s = properties.Schema.from_legacy({
'Type': 'String',
'MinLength': '4',
'MaxLength': '8',
})
self.assertEqual(1, len(s.constraints))
c = s.constraints[0]
self.assertIsInstance(c, constraints.Length)
self.assertEqual(4, c.min)
self.assertEqual(8, c.max)
def test_from_legacy_min_value(self):
s = properties.Schema.from_legacy({
'Type': 'Integer',
'MinValue': 4,
})
self.assertEqual(1, len(s.constraints))
c = s.constraints[0]
self.assertIsInstance(c, constraints.Range)
self.assertEqual(4, c.min)
self.assertIsNone(c.max)
def test_from_legacy_max_value(self):
s = properties.Schema.from_legacy({
'Type': 'Integer',
'MaxValue': 8,
})
self.assertEqual(1, len(s.constraints))
c = s.constraints[0]
self.assertIsInstance(c, constraints.Range)
self.assertIsNone(c.min)
self.assertEqual(8, c.max)
def test_from_legacy_minmax_value(self):
s = properties.Schema.from_legacy({
'Type': 'Integer',
'MinValue': 4,
'MaxValue': 8,
})
self.assertEqual(1, len(s.constraints))
c = s.constraints[0]
self.assertIsInstance(c, constraints.Range)
self.assertEqual(4, c.min)
self.assertEqual(8, c.max)
def test_from_legacy_minmax_string_value(self):
s = properties.Schema.from_legacy({
'Type': 'Integer',
'MinValue': '4',
'MaxValue': '8',
})
self.assertEqual(1, len(s.constraints))
c = s.constraints[0]
self.assertIsInstance(c, constraints.Range)
self.assertEqual(4, c.min)
self.assertEqual(8, c.max)
def test_from_legacy_allowed_values(self):
s = properties.Schema.from_legacy({
'Type': 'String',
'AllowedValues': ['blarg', 'wibble'],
})
self.assertEqual(1, len(s.constraints))
c = s.constraints[0]
self.assertIsInstance(c, constraints.AllowedValues)
self.assertEqual(('blarg', 'wibble'), c.allowed)
def test_from_legacy_allowed_pattern(self):
s = properties.Schema.from_legacy({
'Type': 'String',
'AllowedPattern': '[a-z]*',
})
self.assertEqual(1, len(s.constraints))
c = s.constraints[0]
self.assertIsInstance(c, constraints.AllowedPattern)
self.assertEqual('[a-z]*', c.pattern)
def test_from_legacy_list(self):
l = properties.Schema.from_legacy({
'Type': 'List',
'Default': ['wibble'],
'Schema': {
'Type': 'String',
'Default': 'wibble',
'MaxLength': 8,
}
})
self.assertEqual(properties.Schema.LIST, l.type)
self.assertEqual(['wibble'], l.default)
ss = l.schema[0]
self.assertEqual(properties.Schema.STRING, ss.type)
self.assertEqual('wibble', ss.default)
def test_from_legacy_map(self):
l = properties.Schema.from_legacy({
'Type': 'Map',
'Schema': {
'foo': {
'Type': 'String',
'Default': 'wibble',
}
}
})
self.assertEqual(properties.Schema.MAP, l.type)
ss = l.schema['foo']
self.assertEqual(properties.Schema.STRING, ss.type)
self.assertEqual('wibble', ss.default)
def test_from_legacy_invalid_key(self):
self.assertRaises(exception.InvalidSchemaError,
properties.Schema.from_legacy,
{'Type': 'String', 'Foo': 'Bar'})
def test_from_string_param(self):
description = "WebServer EC2 instance type"
allowed_values = ["t1.micro", "m1.small", "m1.large", "m1.xlarge",
"m2.xlarge", "m2.2xlarge", "m2.4xlarge",
"c1.medium", "c1.xlarge", "cc1.4xlarge"]
constraint_desc = "Must be a valid EC2 instance type."
param = parameters.Schema.from_dict('name', {
"Type": "String",
"Description": description,
"Default": "m1.large",
"AllowedValues": allowed_values,
"ConstraintDescription": constraint_desc,
})
schema = properties.Schema.from_parameter(param)
self.assertEqual(properties.Schema.STRING, schema.type)
self.assertEqual(description, schema.description)
self.assertIsNone(schema.default)
self.assertFalse(schema.required)
self.assertEqual(1, len(schema.constraints))
allowed_constraint = schema.constraints[0]
self.assertEqual(tuple(allowed_values), allowed_constraint.allowed)
self.assertEqual(constraint_desc, allowed_constraint.description)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_from_string_allowed_pattern(self):
description = "WebServer EC2 instance type"
allowed_pattern = "[A-Za-z0-9.]*"
constraint_desc = "Must contain only alphanumeric characters."
param = parameters.Schema.from_dict('name', {
"Type": "String",
"Description": description,
"Default": "m1.large",
"AllowedPattern": allowed_pattern,
"ConstraintDescription": constraint_desc,
})
schema = properties.Schema.from_parameter(param)
self.assertEqual(properties.Schema.STRING, schema.type)
self.assertEqual(description, schema.description)
self.assertIsNone(schema.default)
self.assertFalse(schema.required)
self.assertEqual(1, len(schema.constraints))
allowed_constraint = schema.constraints[0]
self.assertEqual(allowed_pattern, allowed_constraint.pattern)
self.assertEqual(constraint_desc, allowed_constraint.description)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_from_string_multi_constraints(self):
description = "WebServer EC2 instance type"
allowed_pattern = "[A-Za-z0-9.]*"
constraint_desc = "Must contain only alphanumeric characters."
param = parameters.Schema.from_dict('name', {
"Type": "String",
"Description": description,
"Default": "m1.large",
"MinLength": "7",
"AllowedPattern": allowed_pattern,
"ConstraintDescription": constraint_desc,
})
schema = properties.Schema.from_parameter(param)
self.assertEqual(properties.Schema.STRING, schema.type)
self.assertEqual(description, schema.description)
self.assertIsNone(schema.default)
self.assertFalse(schema.required)
self.assertEqual(2, len(schema.constraints))
len_constraint = schema.constraints[0]
allowed_constraint = schema.constraints[1]
self.assertEqual(7, len_constraint.min)
self.assertIsNone(len_constraint.max)
self.assertEqual(allowed_pattern, allowed_constraint.pattern)
self.assertEqual(constraint_desc, allowed_constraint.description)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_from_param_string_min_len(self):
param = parameters.Schema.from_dict('name', {
"Description": "WebServer EC2 instance type",
"Type": "String",
"Default": "m1.large",
"MinLength": "7",
})
schema = properties.Schema.from_parameter(param)
self.assertFalse(schema.required)
self.assertIsNone(schema.default)
self.assertEqual(1, len(schema.constraints))
len_constraint = schema.constraints[0]
self.assertEqual(7, len_constraint.min)
self.assertIsNone(len_constraint.max)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_from_param_string_max_len(self):
param = parameters.Schema.from_dict('name', {
"Description": "WebServer EC2 instance type",
"Type": "String",
"Default": "m1.large",
"MaxLength": "11",
})
schema = properties.Schema.from_parameter(param)
self.assertFalse(schema.required)
self.assertIsNone(schema.default)
self.assertEqual(1, len(schema.constraints))
len_constraint = schema.constraints[0]
self.assertIsNone(len_constraint.min)
self.assertEqual(11, len_constraint.max)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_from_param_string_min_max_len(self):
param = parameters.Schema.from_dict('name', {
"Description": "WebServer EC2 instance type",
"Type": "String",
"Default": "m1.large",
"MinLength": "7",
"MaxLength": "11",
})
schema = properties.Schema.from_parameter(param)
self.assertFalse(schema.required)
self.assertIsNone(schema.default)
self.assertEqual(1, len(schema.constraints))
len_constraint = schema.constraints[0]
self.assertEqual(7, len_constraint.min)
self.assertEqual(11, len_constraint.max)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_from_param_no_default(self):
param = parameters.Schema.from_dict('name', {
"Description": "WebServer EC2 instance type",
"Type": "String",
})
schema = properties.Schema.from_parameter(param)
self.assertTrue(schema.required)
self.assertIsNone(schema.default)
self.assertEqual(0, len(schema.constraints))
self.assertFalse(schema.allow_conversion)
props = properties.Properties({'name': schema}, {'name': 'm1.large'})
props.validate()
def test_from_number_param_min(self):
param = parameters.Schema.from_dict('name', {
"Type": "Number",
"Default": "42",
"MinValue": "10",
})
schema = properties.Schema.from_parameter(param)
self.assertEqual(properties.Schema.NUMBER, schema.type)
self.assertIsNone(schema.default)
self.assertFalse(schema.required)
self.assertEqual(1, len(schema.constraints))
value_constraint = schema.constraints[0]
self.assertEqual(10, value_constraint.min)
self.assertIsNone(value_constraint.max)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_from_number_param_max(self):
param = parameters.Schema.from_dict('name', {
"Type": "Number",
"Default": "42",
"MaxValue": "100",
})
schema = properties.Schema.from_parameter(param)
self.assertEqual(properties.Schema.NUMBER, schema.type)
self.assertIsNone(schema.default)
self.assertFalse(schema.required)
self.assertEqual(1, len(schema.constraints))
value_constraint = schema.constraints[0]
self.assertIsNone(value_constraint.min)
self.assertEqual(100, value_constraint.max)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_from_number_param_min_max(self):
param = parameters.Schema.from_dict('name', {
"Type": "Number",
"Default": "42",
"MinValue": "10",
"MaxValue": "100",
})
schema = properties.Schema.from_parameter(param)
self.assertEqual(properties.Schema.NUMBER, schema.type)
self.assertIsNone(schema.default)
self.assertFalse(schema.required)
self.assertEqual(1, len(schema.constraints))
value_constraint = schema.constraints[0]
self.assertEqual(10, value_constraint.min)
self.assertEqual(100, value_constraint.max)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_from_number_param_allowed_vals(self):
constraint_desc = "The quick brown fox jumps over the lazy dog."
param = parameters.Schema.from_dict('name', {
"Type": "Number",
"Default": "42",
"AllowedValues": ["10", "42", "100"],
"ConstraintDescription": constraint_desc,
})
schema = properties.Schema.from_parameter(param)
self.assertEqual(properties.Schema.NUMBER, schema.type)
self.assertIsNone(schema.default)
self.assertFalse(schema.required)
self.assertEqual(1, len(schema.constraints))
self.assertFalse(schema.allow_conversion)
allowed_constraint = schema.constraints[0]
self.assertEqual(('10', '42', '100'), allowed_constraint.allowed)
self.assertEqual(constraint_desc, allowed_constraint.description)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_from_list_param(self):
param = parameters.Schema.from_dict('name', {
"Type": "CommaDelimitedList",
"Default": "foo,bar,baz"
})
schema = properties.Schema.from_parameter(param)
self.assertEqual(properties.Schema.LIST, schema.type)
self.assertIsNone(schema.default)
self.assertFalse(schema.required)
self.assertFalse(schema.allow_conversion)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_from_json_param(self):
param = parameters.Schema.from_dict('name', {
"Type": "Json",
"Default": {"foo": "bar", "blarg": "wibble"}
})
schema = properties.Schema.from_parameter(param)
self.assertEqual(properties.Schema.MAP, schema.type)
self.assertIsNone(schema.default)
self.assertFalse(schema.required)
self.assertTrue(schema.allow_conversion)
props = properties.Properties({'test': schema}, {})
props.validate()
def test_no_mismatch_in_update_policy(self):
manager = plugin_manager.PluginManager('heat.engine.resources')
resource_mapping = plugin_manager.PluginMapping('resource')
res_plugin_mappings = resource_mapping.load_all(manager)
all_resources = {}
for mapping in res_plugin_mappings:
name, cls = mapping
all_resources[name] = cls
def check_update_policy(resource_type, prop_key, prop, update=False):
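            # Recursively walk nested schemata: once a property is marked
            # update_allowed, every property nested beneath it must be too,
            # otherwise the update policies contradict each other.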
if prop.update_allowed:
update = True
sub_schema = prop.schema
if sub_schema:
for sub_prop_key, sub_prop in six.iteritems(sub_schema):
if not update:
self.assertEqual(update, sub_prop.update_allowed,
"Mismatch in update policies: "
"resource %(res)s, properties "
"'%(prop)s' and '%(nested_prop)s'." %
{'res': resource_type,
'prop': prop_key,
'nested_prop': sub_prop_key})
                    if sub_prop_key == '*':
check_update_policy(resource_type, prop_key,
sub_prop, update)
else:
check_update_policy(resource_type, sub_prop_key,
sub_prop, update)
for resource_type, resource_class in six.iteritems(all_resources):
props_schemata = properties.schemata(
resource_class.properties_schema)
for prop_key, prop in six.iteritems(props_schemata):
check_update_policy(resource_type, prop_key, prop)
class PropertyTest(common.HeatTestCase):
def test_required_default(self):
p = properties.Property({'Type': 'String'})
self.assertFalse(p.required())
def test_required_false(self):
p = properties.Property({'Type': 'String', 'Required': False})
self.assertFalse(p.required())
def test_required_true(self):
p = properties.Property({'Type': 'String', 'Required': True})
self.assertTrue(p.required())
def test_implemented_default(self):
p = properties.Property({'Type': 'String'})
self.assertTrue(p.implemented())
def test_implemented_false(self):
p = properties.Property({'Type': 'String', 'Implemented': False})
self.assertFalse(p.implemented())
def test_implemented_true(self):
p = properties.Property({'Type': 'String', 'Implemented': True})
self.assertTrue(p.implemented())
def test_no_default(self):
p = properties.Property({'Type': 'String'})
self.assertFalse(p.has_default())
def test_default(self):
p = properties.Property({'Type': 'String', 'Default': 'wibble'})
self.assertEqual('wibble', p.default())
def test_type(self):
p = properties.Property({'Type': 'String'})
self.assertEqual('String', p.type())
def test_bad_type(self):
self.assertRaises(exception.InvalidSchemaError,
properties.Property, {'Type': 'Fish'})
def test_bad_key(self):
self.assertRaises(exception.InvalidSchemaError,
properties.Property,
{'Type': 'String', 'Foo': 'Bar'})
def test_string_pattern_good(self):
schema = {'Type': 'String',
'AllowedPattern': '[a-z]*'}
p = properties.Property(schema)
self.assertEqual('foo', p.get_value('foo', True))
def test_string_pattern_bad_prefix(self):
schema = {'Type': 'String',
'AllowedPattern': '[a-z]*'}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed,
p.get_value, '1foo', True)
def test_string_pattern_bad_suffix(self):
schema = {'Type': 'String',
'AllowedPattern': '[a-z]*'}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed,
p.get_value, 'foo1', True)
def test_string_value_list_good(self):
schema = {'Type': 'String',
'AllowedValues': ['foo', 'bar', 'baz']}
p = properties.Property(schema)
self.assertEqual('bar', p.get_value('bar', True))
def test_string_value_list_bad(self):
schema = {'Type': 'String',
'AllowedValues': ['foo', 'bar', 'baz']}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed,
p.get_value, 'blarg', True)
def test_string_maxlength_good(self):
schema = {'Type': 'String',
'MaxLength': '5'}
p = properties.Property(schema)
self.assertEqual('abcd', p.get_value('abcd', True))
def test_string_exceeded_maxlength(self):
schema = {'Type': 'String',
'MaxLength': '5'}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed,
p.get_value, 'abcdef', True)
def test_string_length_in_range(self):
schema = {'Type': 'String',
'MinLength': '5',
'MaxLength': '10'}
p = properties.Property(schema)
self.assertEqual('abcdef', p.get_value('abcdef', True))
def test_string_minlength_good(self):
schema = {'Type': 'String',
'MinLength': '5'}
p = properties.Property(schema)
self.assertEqual('abcde', p.get_value('abcde', True))
def test_string_smaller_than_minlength(self):
schema = {'Type': 'String',
'MinLength': '5'}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed,
p.get_value, 'abcd', True)
def test_int_good(self):
schema = {'Type': 'Integer',
'MinValue': 3,
'MaxValue': 3}
p = properties.Property(schema)
self.assertEqual(3, p.get_value(3, True))
def test_int_bad(self):
schema = {'Type': 'Integer'}
p = properties.Property(schema)
        # Python 3.4.3 returns a different error message, so match both
        # variants with a regexp
        self.assertRaisesRegexp(
            TypeError, r"int\(\) argument must be a string(, a bytes-like "
            r"object)? or a number, not 'list'", p.get_value, [1])
def test_str_from_int(self):
schema = {'Type': 'String'}
p = properties.Property(schema)
self.assertEqual('3', p.get_value(3))
def test_str_from_bool(self):
schema = {'Type': 'String'}
p = properties.Property(schema)
self.assertEqual('True', p.get_value(True))
def test_int_from_str_good(self):
schema = {'Type': 'Integer'}
p = properties.Property(schema)
self.assertEqual(3, p.get_value('3'))
def test_int_from_str_bad(self):
schema = {'Type': 'Integer'}
p = properties.Property(schema)
ex = self.assertRaises(TypeError, p.get_value, '3a')
self.assertEqual("Value '3a' is not an integer", six.text_type(ex))
def test_integer_low(self):
schema = {'Type': 'Integer',
'MinValue': 4}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed, p.get_value, 3,
True)
def test_integer_high(self):
schema = {'Type': 'Integer',
'MaxValue': 2}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed, p.get_value, 3,
True)
def test_integer_value_list_good(self):
schema = {'Type': 'Integer',
'AllowedValues': [1, 3, 5]}
p = properties.Property(schema)
        self.assertEqual(5, p.get_value(5, True))
def test_integer_value_list_bad(self):
schema = {'Type': 'Integer',
'AllowedValues': [1, 3, 5]}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed, p.get_value, 2,
True)
def test_number_good(self):
schema = {'Type': 'Number',
'MinValue': '3',
'MaxValue': '3'}
p = properties.Property(schema)
self.assertEqual(3, p.get_value(3, True))
def test_numbers_from_strings(self):
"""Numbers can be converted from strings."""
schema = {'Type': 'Number',
'MinValue': '3',
'MaxValue': '3'}
p = properties.Property(schema)
self.assertEqual(3, p.get_value('3'))
def test_number_value_list_good(self):
schema = {'Type': 'Number',
'AllowedValues': [1, 3, 5]}
p = properties.Property(schema)
self.assertEqual(5, p.get_value('5', True))
def test_number_value_list_bad(self):
schema = {'Type': 'Number',
'AllowedValues': ['1', '3', '5']}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed,
p.get_value, '2', True)
def test_number_low(self):
schema = {'Type': 'Number',
'MinValue': '4'}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed,
p.get_value, '3', True)
def test_number_high(self):
schema = {'Type': 'Number',
'MaxValue': '2'}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed,
p.get_value, '3', True)
def test_boolean_true(self):
p = properties.Property({'Type': 'Boolean'})
self.assertIs(True, p.get_value('True'))
self.assertIs(True, p.get_value('true'))
self.assertIs(True, p.get_value(True))
def test_boolean_false(self):
p = properties.Property({'Type': 'Boolean'})
self.assertIs(False, p.get_value('False'))
self.assertIs(False, p.get_value('false'))
self.assertIs(False, p.get_value(False))
def test_boolean_invalid(self):
p = properties.Property({'Type': 'Boolean'})
self.assertRaises(ValueError, p.get_value, 'fish')
def test_list_string(self):
p = properties.Property({'Type': 'List'})
self.assertRaises(TypeError, p.get_value, 'foo')
def test_list_good(self):
p = properties.Property({'Type': 'List'})
self.assertEqual(['foo', 'bar'], p.get_value(['foo', 'bar']))
def test_list_dict(self):
p = properties.Property({'Type': 'List'})
self.assertRaises(TypeError, p.get_value, {'foo': 'bar'})
def test_list_maxlength_good(self):
schema = {'Type': 'List',
'MaxLength': '3'}
p = properties.Property(schema)
self.assertEqual(['1', '2'], p.get_value(['1', '2'], True))
def test_list_exceeded_maxlength(self):
schema = {'Type': 'List',
'MaxLength': '2'}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed,
p.get_value, ['1', '2', '3'], True)
def test_list_length_in_range(self):
schema = {'Type': 'List',
'MinLength': '2',
'MaxLength': '4'}
p = properties.Property(schema)
self.assertEqual(['1', '2', '3'], p.get_value(['1', '2', '3'], True))
def test_list_minlength_good(self):
schema = {'Type': 'List',
'MinLength': '3'}
p = properties.Property(schema)
self.assertEqual(['1', '2', '3'], p.get_value(['1', '2', '3'], True))
def test_list_smaller_than_minlength(self):
schema = {'Type': 'List',
'MinLength': '4'}
p = properties.Property(schema)
self.assertRaises(exception.StackValidationFailed,
p.get_value, ['1', '2', '3'], True)
def test_map_list_default(self):
schema = {'Type': 'Map',
'Default': ['foo', 'bar']}
p = properties.Property(schema)
p.schema.allow_conversion = True
self.assertEqual(jsonutils.dumps(['foo', 'bar']),
p.get_value(None))
def test_map_list_default_empty(self):
schema = {'Type': 'Map',
'Default': []}
p = properties.Property(schema)
p.schema.allow_conversion = True
self.assertEqual(jsonutils.dumps([]), p.get_value(None))
def test_map_list_no_default(self):
schema = {'Type': 'Map'}
p = properties.Property(schema)
p.schema.allow_conversion = True
self.assertEqual({}, p.get_value(None))
def test_map_string(self):
p = properties.Property({'Type': 'Map'})
self.assertRaises(TypeError, p.get_value, 'foo')
def test_map_list(self):
p = properties.Property({'Type': 'Map'})
self.assertRaises(TypeError, p.get_value, ['foo'])
def test_map_allow_conversion(self):
p = properties.Property({'Type': 'Map'})
p.schema.allow_conversion = True
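        # With allow_conversion set, non-map values are accepted: strings
        # pass through unchanged and lists are serialized to JSON.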
self.assertEqual('foo', p.get_value('foo'))
self.assertEqual(jsonutils.dumps(['foo']), p.get_value(['foo']))
def test_map_schema_good(self):
map_schema = {'valid': {'Type': 'Boolean'}}
p = properties.Property({'Type': 'Map', 'Schema': map_schema})
self.assertEqual({'valid': True}, p.get_value({'valid': 'TRUE'}))
def test_map_schema_bad_data(self):
map_schema = {'valid': {'Type': 'Boolean'}}
p = properties.Property({'Type': 'Map', 'Schema': map_schema})
ex = self.assertRaises(exception.StackValidationFailed,
p.get_value, {'valid': 'fish'}, True)
self.assertEqual('Property error: valid: "fish" is not a '
'valid boolean', six.text_type(ex))
def test_map_schema_missing_data(self):
map_schema = {'valid': {'Type': 'Boolean'}}
p = properties.Property({'Type': 'Map', 'Schema': map_schema})
self.assertEqual({'valid': None}, p.get_value({}))
def test_map_schema_missing_required_data(self):
map_schema = {'valid': {'Type': 'Boolean', 'Required': True}}
p = properties.Property({'Type': 'Map', 'Schema': map_schema})
ex = self.assertRaises(exception.StackValidationFailed,
p.get_value, {}, True)
self.assertEqual('Property error: Property valid not assigned',
six.text_type(ex))
def test_list_schema_good(self):
map_schema = {'valid': {'Type': 'Boolean'}}
list_schema = {'Type': 'Map', 'Schema': map_schema}
p = properties.Property({'Type': 'List', 'Schema': list_schema})
self.assertEqual([{'valid': True},
{'valid': False}],
p.get_value([{'valid': 'TRUE'},
{'valid': 'False'}]))
def test_list_schema_bad_data(self):
map_schema = {'valid': {'Type': 'Boolean'}}
list_schema = {'Type': 'Map', 'Schema': map_schema}
p = properties.Property({'Type': 'List', 'Schema': list_schema})
ex = self.assertRaises(exception.StackValidationFailed,
p.get_value,
[{'valid': 'True'}, {'valid': 'fish'}], True)
self.assertEqual('Property error: [1].valid: "fish" is not '
'a valid boolean', six.text_type(ex))
def test_list_schema_int_good(self):
list_schema = {'Type': 'Integer'}
p = properties.Property({'Type': 'List', 'Schema': list_schema})
self.assertEqual([1, 2, 3], p.get_value([1, 2, 3]))
def test_list_schema_int_bad_data(self):
list_schema = {'Type': 'Integer'}
p = properties.Property({'Type': 'List', 'Schema': list_schema})
ex = self.assertRaises(exception.StackValidationFailed,
p.get_value, [42, 'fish'], True)
self.assertEqual("Property error: [1]: Value 'fish' is not "
"an integer", six.text_type(ex))
class PropertiesTest(common.HeatTestCase):
def setUp(self):
super(PropertiesTest, self).setUp()
schema = {
'int': {'Type': 'Integer'},
'string': {'Type': 'String'},
'required_int': {'Type': 'Integer', 'Required': True},
'bad_int': {'Type': 'Integer'},
'missing': {'Type': 'Integer'},
'defaulted': {'Type': 'Integer', 'Default': 1},
'default_override': {'Type': 'Integer', 'Default': 1},
'default_bool': {'Type': 'Boolean', 'Default': 'false'},
}
data = {
'int': 21,
'string': 'foo',
'bad_int': 'foo',
'default_override': 21,
}
def double(d):
return d * 2
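        # double() is used as the resolver, so user-supplied values are
        # doubled on access (e.g. 'int': 21 resolves to 42 below).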
self.props = properties.Properties(schema, data, double, 'wibble')
def test_integer_good(self):
self.assertEqual(42, self.props['int'])
def test_string_good(self):
self.assertEqual('foofoo', self.props['string'])
def test_bool_not_str(self):
self.assertFalse(self.props['default_bool'])
def test_missing_required(self):
self.assertRaises(ValueError, self.props.get, 'required_int')
def test_integer_bad(self):
self.assertRaises(ValueError, self.props.get, 'bad_int')
def test_missing(self):
self.assertIsNone(self.props['missing'])
def test_default(self):
self.assertEqual(1, self.props['defaulted'])
def test_default_override(self):
self.assertEqual(42, self.props['default_override'])
def test_get_user_value(self):
self.assertIsNone(self.props.get_user_value('defaulted'))
self.assertEqual(42, self.props.get_user_value('default_override'))
def test_get_user_value_key_error(self):
ex = self.assertRaises(KeyError, self.props.get_user_value, 'foo')
# Note we have to use args here: https://bugs.python.org/issue2651
self.assertEqual('Invalid Property foo',
six.text_type(ex.args[0]))
def test_bad_key(self):
self.assertEqual('wibble', self.props.get('foo', 'wibble'))
def test_key_error(self):
ex = self.assertRaises(KeyError, self.props.__getitem__, 'foo')
# Note we have to use args here: https://bugs.python.org/issue2651
self.assertEqual('Invalid Property foo',
six.text_type(ex.args[0]))
def test_none_string(self):
schema = {'foo': {'Type': 'String'}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual('', props['foo'])
def test_none_integer(self):
schema = {'foo': {'Type': 'Integer'}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual(0, props['foo'])
def test_none_number(self):
schema = {'foo': {'Type': 'Number'}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual(0, props['foo'])
def test_none_boolean(self):
schema = {'foo': {'Type': 'Boolean'}}
props = properties.Properties(schema, {'foo': None})
self.assertIs(False, props['foo'])
def test_none_map(self):
schema = {'foo': {'Type': 'Map'}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual({}, props['foo'])
def test_none_list(self):
schema = {'foo': {'Type': 'List'}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual([], props['foo'])
def test_none_default_string(self):
schema = {'foo': {'Type': 'String', 'Default': 'bar'}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual('bar', props['foo'])
def test_none_default_integer(self):
schema = {'foo': {'Type': 'Integer', 'Default': 42}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual(42, props['foo'])
schema = {'foo': {'Type': 'Integer', 'Default': 0}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual(0, props['foo'])
schema = {'foo': {'Type': 'Integer', 'Default': -273}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual(-273, props['foo'])
def test_none_default_number(self):
schema = {'foo': {'Type': 'Number', 'Default': 42.0}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual(42.0, props['foo'])
schema = {'foo': {'Type': 'Number', 'Default': 0.0}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual(0.0, props['foo'])
schema = {'foo': {'Type': 'Number', 'Default': -273.15}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual(-273.15, props['foo'])
def test_none_default_boolean(self):
schema = {'foo': {'Type': 'Boolean', 'Default': True}}
props = properties.Properties(schema, {'foo': None})
self.assertIs(True, props['foo'])
def test_none_default_map(self):
schema = {'foo': {'Type': 'Map', 'Default': {'bar': 'baz'}}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual({'bar': 'baz'}, props['foo'])
def test_none_default_list(self):
schema = {'foo': {'Type': 'List', 'Default': ['one', 'two']}}
props = properties.Properties(schema, {'foo': None})
self.assertEqual(['one', 'two'], props['foo'])
def test_resolve_returns_none(self):
schema = {'foo': {'Type': 'String', "MinLength": "5"}}
def test_resolver(prop):
return None
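        # A resolver returning None simulates a value that cannot be
        # resolved yet; validation must skip constraint checks in that case.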
self.patchobject(properties.Properties,
'_find_deps_any_in_init').return_value = True
props = properties.Properties(schema,
{'foo': 'get_attr: [db, value]'},
test_resolver)
try:
self.assertIsNone(props.validate())
except exception.StackValidationFailed:
self.fail("Constraints should not have been evaluated.")
def test_resolve_ref_with_constraints(self):
# create test custom constraint
class IncorrectConstraint(constraints.BaseCustomConstraint):
expected_exceptions = (Exception,)
def validate_with_client(self, client, value):
raise Exception("Test exception")
class TestCustomConstraint(constraints.CustomConstraint):
@property
def custom_constraint(self):
return IncorrectConstraint()
# create schema with test constraint
schema = {
'foo': properties.Schema(
properties.Schema.STRING,
constraints=[TestCustomConstraint('test_constraint')]
)
}
# define parameters for function
def test_resolver(prop):
return 'None'
class rsrc(object):
action = INIT = "INIT"
class DummyStack(dict):
pass
stack = DummyStack(another_res=rsrc())
# define properties with function and constraint
props = properties.Properties(
schema,
{'foo': cfn_funcs.ResourceRef(
stack, 'get_resource', 'another_res')},
test_resolver)
try:
self.assertIsNone(props.validate())
except exception.StackValidationFailed:
self.fail("Constraints should not have been evaluated.")
def test_schema_from_params(self):
params_snippet = {
"DBUsername": {
"Type": "String",
"Description": "The WordPress database admin account username",
"Default": "admin",
"MinLength": "1",
"AllowedPattern": "[a-zA-Z][a-zA-Z0-9]*",
"NoEcho": "true",
"MaxLength": "16",
"ConstraintDescription": ("must begin with a letter and "
"contain only alphanumeric "
"characters.")
},
"KeyName": {
"Type": "String",
"Description": ("Name of an existing EC2 KeyPair to enable "
"SSH access to the instances")
},
"LinuxDistribution": {
"Default": "F17",
"Type": "String",
"Description": "Distribution of choice",
"AllowedValues": [
"F18",
"F17",
"U10",
"RHEL-6.1",
"RHEL-6.2",
"RHEL-6.3"
]
},
"DBPassword": {
"Type": "String",
"Description": "The WordPress database admin account password",
"Default": "admin",
"MinLength": "1",
"AllowedPattern": "[a-zA-Z0-9]*",
"NoEcho": "true",
"MaxLength": "41",
"ConstraintDescription": ("must contain only alphanumeric "
"characters.")
},
"DBName": {
"AllowedPattern": "[a-zA-Z][a-zA-Z0-9]*",
"Type": "String",
"Description": "The WordPress database name",
"MaxLength": "64",
"Default": "wordpress",
"MinLength": "1",
"ConstraintDescription": ("must begin with a letter and "
"contain only alphanumeric "
"characters.")
},
"InstanceType": {
"Default": "m1.large",
"Type": "String",
"ConstraintDescription": "must be a valid EC2 instance type.",
"Description": "WebServer EC2 instance type",
"AllowedValues": [
"t1.micro",
"m1.small",
"m1.large",
"m1.xlarge",
"m2.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"c1.medium",
"c1.xlarge",
"cc1.4xlarge"
]
},
"DBRootPassword": {
"Type": "String",
"Description": "Root password for MySQL",
"Default": "admin",
"MinLength": "1",
"AllowedPattern": "[a-zA-Z0-9]*",
"NoEcho": "true",
"MaxLength": "41",
"ConstraintDescription": ("must contain only alphanumeric "
"characters.")
}
}
expected = {
"DBUsername": {
"type": "string",
"description": "The WordPress database admin account username",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"length": {"min": 1, "max": 16},
"description": "must begin with a letter and contain "
"only alphanumeric characters."},
{"allowed_pattern": "[a-zA-Z][a-zA-Z0-9]*",
"description": "must begin with a letter and contain "
"only alphanumeric characters."},
]
},
"LinuxDistribution": {
"type": "string",
"description": "Distribution of choice",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"allowed_values": ["F18", "F17", "U10",
"RHEL-6.1", "RHEL-6.2", "RHEL-6.3"]}
]
},
"InstanceType": {
"type": "string",
"description": "WebServer EC2 instance type",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"allowed_values": ["t1.micro",
"m1.small",
"m1.large",
"m1.xlarge",
"m2.xlarge",
"m2.2xlarge",
"m2.4xlarge",
"c1.medium",
"c1.xlarge",
"cc1.4xlarge"],
"description": "must be a valid EC2 instance type."},
]
},
"DBRootPassword": {
"type": "string",
"description": "Root password for MySQL",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"length": {"min": 1, "max": 41},
"description": "must contain only alphanumeric "
"characters."},
{"allowed_pattern": "[a-zA-Z0-9]*",
"description": "must contain only alphanumeric "
"characters."},
]
},
"KeyName": {
"type": "string",
"description": ("Name of an existing EC2 KeyPair to enable "
"SSH access to the instances"),
"required": True,
'update_allowed': True,
'immutable': False,
},
"DBPassword": {
"type": "string",
"description": "The WordPress database admin account password",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"length": {"min": 1, "max": 41},
"description": "must contain only alphanumeric "
"characters."},
{"allowed_pattern": "[a-zA-Z0-9]*",
"description": "must contain only alphanumeric "
"characters."},
]
},
"DBName": {
"type": "string",
"description": "The WordPress database name",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"length": {"min": 1, "max": 64},
"description": "must begin with a letter and contain "
"only alphanumeric characters."},
{"allowed_pattern": "[a-zA-Z][a-zA-Z0-9]*",
"description": "must begin with a letter and contain "
"only alphanumeric characters."},
]
},
}
params = dict((n, parameters.Schema.from_dict(n, s)) for n, s
in params_snippet.items())
props_schemata = properties.Properties.schema_from_params(params)
self.assertEqual(expected,
dict((n, dict(s)) for n, s in props_schemata.items()))
def test_schema_from_hot_params(self):
params_snippet = {
"KeyName": {
"type": "string",
"description": ("Name of an existing EC2 KeyPair to enable "
"SSH access to the instances")
},
"InstanceType": {
"default": "m1.large",
"type": "string",
"description": "WebServer EC2 instance type",
"constraints": [
{"allowed_values": ["t1.micro", "m1.small", "m1.large",
"m1.xlarge", "m2.xlarge", "m2.2xlarge",
"m2.4xlarge", "c1.medium", "c1.xlarge",
"cc1.4xlarge"],
"description": "Must be a valid EC2 instance type."}
]
},
"LinuxDistribution": {
"default": "F17",
"type": "string",
"description": "Distribution of choice",
"constraints": [
{"allowed_values": ["F18", "F17", "U10", "RHEL-6.1",
"RHEL-6.2", "RHEL-6.3"],
"description": "Must be a valid Linux distribution"}
]
},
"DBName": {
"type": "string",
"description": "The WordPress database name",
"default": "wordpress",
"constraints": [
{"length": {"min": 1, "max": 64},
"description": "Length must be between 1 and 64"},
{"allowed_pattern": "[a-zA-Z][a-zA-Z0-9]*",
"description": ("Must begin with a letter and contain "
"only alphanumeric characters.")}
]
},
"DBUsername": {
"type": "string",
"description": "The WordPress database admin account username",
"default": "admin",
"hidden": "true",
"constraints": [
{"length": {"min": 1, "max": 16},
"description": "Length must be between 1 and 16"},
{"allowed_pattern": "[a-zA-Z][a-zA-Z0-9]*",
"description": ("Must begin with a letter and only "
"contain alphanumeric characters")}
]
},
"DBPassword": {
"type": "string",
"description": "The WordPress database admin account password",
"default": "admin",
"hidden": "true",
"constraints": [
{"length": {"min": 1, "max": 41},
"description": "Length must be between 1 and 41"},
{"allowed_pattern": "[a-zA-Z0-9]*",
"description": ("Must contain only alphanumeric "
"characters")}
]
},
"DBRootPassword": {
"type": "string",
"description": "Root password for MySQL",
"default": "admin",
"hidden": "true",
"constraints": [
{"length": {"min": 1, "max": 41},
"description": "Length must be between 1 and 41"},
{"allowed_pattern": "[a-zA-Z0-9]*",
"description": ("Must contain only alphanumeric "
"characters")}
]
}
}
expected = {
"KeyName": {
"type": "string",
"description": ("Name of an existing EC2 KeyPair to enable "
"SSH access to the instances"),
"required": True,
'update_allowed': True,
'immutable': False,
},
"InstanceType": {
"type": "string",
"description": "WebServer EC2 instance type",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"allowed_values": ["t1.micro", "m1.small", "m1.large",
"m1.xlarge", "m2.xlarge", "m2.2xlarge",
"m2.4xlarge", "c1.medium", "c1.xlarge",
"cc1.4xlarge"],
"description": "Must be a valid EC2 instance type."},
]
},
"LinuxDistribution": {
"type": "string",
"description": "Distribution of choice",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"allowed_values": ["F18", "F17", "U10",
"RHEL-6.1", "RHEL-6.2", "RHEL-6.3"],
"description": "Must be a valid Linux distribution"}
]
},
"DBName": {
"type": "string",
"description": "The WordPress database name",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"length": {"min": 1, "max": 64},
"description": "Length must be between 1 and 64"},
{"allowed_pattern": "[a-zA-Z][a-zA-Z0-9]*",
"description": ("Must begin with a letter and contain "
"only alphanumeric characters.")},
]
},
"DBUsername": {
"type": "string",
"description": "The WordPress database admin account username",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"length": {"min": 1, "max": 16},
"description": "Length must be between 1 and 16"},
{"allowed_pattern": "[a-zA-Z][a-zA-Z0-9]*",
"description": ("Must begin with a letter and only "
"contain alphanumeric characters")},
]
},
"DBPassword": {
"type": "string",
"description": "The WordPress database admin account password",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"length": {"min": 1, "max": 41},
"description": "Length must be between 1 and 41"},
{"allowed_pattern": "[a-zA-Z0-9]*",
"description": ("Must contain only alphanumeric "
"characters")},
]
},
"DBRootPassword": {
"type": "string",
"description": "Root password for MySQL",
"required": False,
'update_allowed': True,
'immutable': False,
"constraints": [
{"length": {"min": 1, "max": 41},
"description": "Length must be between 1 and 41"},
{"allowed_pattern": "[a-zA-Z0-9]*",
"description": ("Must contain only alphanumeric "
"characters")},
]
}
}
params = dict((n, hot_param.HOTParamSchema.from_dict(n, s)) for n, s
in params_snippet.items())
props_schemata = properties.Properties.schema_from_params(params)
self.assertEqual(expected,
dict((n, dict(s)) for n, s in props_schemata.items()))
def test_compare_same(self):
schema = {'foo': {'Type': 'Integer'}}
props_a = properties.Properties(schema, {'foo': 1})
props_b = properties.Properties(schema, {'foo': 1})
self.assertFalse(props_a != props_b)
def test_compare_different(self):
schema = {'foo': {'Type': 'Integer'}}
props_a = properties.Properties(schema, {'foo': 0})
props_b = properties.Properties(schema, {'foo': 1})
self.assertTrue(props_a != props_b)
class PropertiesValidationTest(common.HeatTestCase):
def test_required(self):
schema = {'foo': {'Type': 'String', 'Required': True}}
props = properties.Properties(schema, {'foo': 'bar'})
self.assertIsNone(props.validate())
def test_missing_required(self):
schema = {'foo': {'Type': 'String', 'Required': True}}
props = properties.Properties(schema, {})
self.assertRaises(exception.StackValidationFailed, props.validate)
def test_missing_unimplemented(self):
schema = {'foo': {'Type': 'String', 'Implemented': False}}
props = properties.Properties(schema, {})
self.assertIsNone(props.validate())
def test_present_unimplemented(self):
schema = {'foo': {'Type': 'String', 'Implemented': False}}
props = properties.Properties(schema, {'foo': 'bar'})
self.assertRaises(exception.StackValidationFailed, props.validate)
def test_missing(self):
schema = {'foo': {'Type': 'String'}}
props = properties.Properties(schema, {})
self.assertIsNone(props.validate())
def test_unknown_typo(self):
schema = {'foo': {'Type': 'String'}}
props = properties.Properties(schema, {'food': 42})
self.assertRaises(exception.StackValidationFailed, props.validate)
def test_list_instead_string(self):
schema = {'foo': {'Type': 'String'}}
props = properties.Properties(schema, {'foo': ['foo', 'bar']})
ex = self.assertRaises(exception.StackValidationFailed, props.validate)
self.assertEqual('Property error: foo: Value must be a string',
six.text_type(ex))
def test_dict_instead_string(self):
schema = {'foo': {'Type': 'String'}}
props = properties.Properties(schema, {'foo': {'foo': 'bar'}})
ex = self.assertRaises(exception.StackValidationFailed, props.validate)
self.assertEqual('Property error: foo: Value must be a string',
six.text_type(ex))
def test_none_string(self):
schema = {'foo': {'Type': 'String'}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_none_integer(self):
schema = {'foo': {'Type': 'Integer'}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_none_number(self):
schema = {'foo': {'Type': 'Number'}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_none_boolean(self):
schema = {'foo': {'Type': 'Boolean'}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_none_map(self):
schema = {'foo': {'Type': 'Map'}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_none_list(self):
schema = {'foo': {'Type': 'List'}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_none_default_string(self):
schema = {'foo': {'Type': 'String', 'Default': 'bar'}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_none_default_integer(self):
schema = {'foo': {'Type': 'Integer', 'Default': 42}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_none_default_number(self):
schema = {'foo': {'Type': 'Number', 'Default': 42.0}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_none_default_boolean(self):
schema = {'foo': {'Type': 'Boolean', 'Default': True}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_none_default_map(self):
schema = {'foo': {'Type': 'Map', 'Default': {'bar': 'baz'}}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_none_default_list(self):
schema = {'foo': {'Type': 'List', 'Default': ['one', 'two']}}
props = properties.Properties(schema, {'foo': None})
self.assertIsNone(props.validate())
def test_schema_to_template_nested_map_map_schema(self):
nested_schema = {'Key': {'Type': 'String',
'Required': True},
'Value': {'Type': 'String',
'Default': 'fewaf'}}
schema = {'foo': {'Type': 'Map', 'Schema': nested_schema}}
prop_expected = {'foo': {'Ref': 'foo'}}
param_expected = {'foo': {'Type': 'Json'}}
(parameters,
props) = properties.Properties.schema_to_parameters_and_properties(
schema)
self.assertEqual(param_expected, parameters)
self.assertEqual(prop_expected, props)
def test_schema_to_template_nested_map_list_map_schema(self):
key_schema = {'bar': {'Type': 'Number'}}
nested_schema = {'Key': {'Type': 'Map', 'Schema': key_schema},
'Value': {'Type': 'String',
'Required': True}}
schema = {'foo': {'Type': 'List', 'Schema': {'Type': 'Map',
'Schema': nested_schema}}}
prop_expected = {'foo': {'Fn::Split': [",", {'Ref': 'foo'}]}}
param_expected = {'foo': {'Type': 'CommaDelimitedList'}}
(parameters,
props) = properties.Properties.schema_to_parameters_and_properties(
schema)
self.assertEqual(param_expected, parameters)
self.assertEqual(prop_expected, props)
def test_schema_object_to_template_nested_map_list_map_schema(self):
key_schema = {'bar': properties.Schema(properties.Schema.NUMBER)}
nested_schema = {
'Key': properties.Schema(properties.Schema.MAP, schema=key_schema),
'Value': properties.Schema(properties.Schema.STRING, required=True)
}
schema = {
'foo': properties.Schema(properties.Schema.LIST,
schema=properties.Schema(
properties.Schema.MAP,
schema=nested_schema))
}
prop_expected = {'foo': {'Fn::Split': [",", {'Ref': 'foo'}]}}
param_expected = {'foo': {'Type': 'CommaDelimitedList'}}
(parameters,
props) = properties.Properties.schema_to_parameters_and_properties(
schema)
self.assertEqual(param_expected, parameters)
self.assertEqual(prop_expected, props)
def test_schema_invalid_parameters_stripped(self):
schema = {'foo': {'Type': 'String',
'Required': True,
'Implemented': True}}
prop_expected = {'foo': {'Ref': 'foo'}}
param_expected = {'foo': {'Type': 'String'}}
(parameters,
props) = properties.Properties.schema_to_parameters_and_properties(
schema)
self.assertEqual(param_expected, parameters)
self.assertEqual(prop_expected, props)
def test_schema_support_status(self):
schema = {
'foo_sup': properties.Schema(
properties.Schema.STRING,
default='foo'
),
'bar_dep': properties.Schema(
properties.Schema.STRING,
default='bar',
support_status=support.SupportStatus(
support.DEPRECATED,
'Do not use this ever')
)
}
props = properties.Properties(schema, {})
self.assertEqual(support.SUPPORTED,
props.props['foo_sup'].support_status().status)
self.assertEqual(support.DEPRECATED,
props.props['bar_dep'].support_status().status)
self.assertEqual('Do not use this ever',
props.props['bar_dep'].support_status().message)
def test_nested_properties_schema_invalid_property_in_list(self):
child_schema = {'Key': {'Type': 'String',
'Required': True},
'Value': {'Type': 'Boolean',
'Default': True}}
list_schema = {'Type': 'Map', 'Schema': child_schema}
schema = {'foo': {'Type': 'List', 'Schema': list_schema}}
valid_data = {'foo': [{'Key': 'Test'}]}
props = properties.Properties(schema, valid_data)
self.assertIsNone(props.validate())
invalid_data = {'foo': [{'Key': 'Test', 'bar': 'baz'}]}
props = properties.Properties(schema, invalid_data)
ex = self.assertRaises(exception.StackValidationFailed,
props.validate)
self.assertEqual('Property error: foo[0]: Unknown Property bar',
six.text_type(ex))
def test_nested_properties_schema_invalid_property_in_map(self):
child_schema = {'Key': {'Type': 'String',
'Required': True},
'Value': {'Type': 'Boolean',
'Default': True}}
map_schema = {'boo': {'Type': 'Map', 'Schema': child_schema}}
schema = {'foo': {'Type': 'Map', 'Schema': map_schema}}
valid_data = {'foo': {'boo': {'Key': 'Test'}}}
props = properties.Properties(schema, valid_data)
self.assertIsNone(props.validate())
invalid_data = {'foo': {'boo': {'Key': 'Test', 'bar': 'baz'}}}
props = properties.Properties(schema, invalid_data)
ex = self.assertRaises(exception.StackValidationFailed,
props.validate)
self.assertEqual('Property error: foo.boo: Unknown Property bar',
six.text_type(ex))
def test_more_nested_properties_schema_invalid_property_in_list(self):
nested_child_schema = {'Key': {'Type': 'String',
'Required': True}}
child_schema = {'doo': {'Type': 'Map', 'Schema': nested_child_schema}}
list_schema = {'Type': 'Map', 'Schema': child_schema}
schema = {'foo': {'Type': 'List', 'Schema': list_schema}}
valid_data = {'foo': [{'doo': {'Key': 'Test'}}]}
props = properties.Properties(schema, valid_data)
self.assertIsNone(props.validate())
invalid_data = {'foo': [{'doo': {'Key': 'Test', 'bar': 'baz'}}]}
props = properties.Properties(schema, invalid_data)
ex = self.assertRaises(exception.StackValidationFailed,
props.validate)
self.assertEqual('Property error: foo[0].doo: Unknown Property bar',
six.text_type(ex))
def test_more_nested_properties_schema_invalid_property_in_map(self):
nested_child_schema = {'Key': {'Type': 'String',
'Required': True}}
child_schema = {'doo': {'Type': 'Map', 'Schema': nested_child_schema}}
map_schema = {'boo': {'Type': 'Map', 'Schema': child_schema}}
schema = {'foo': {'Type': 'Map', 'Schema': map_schema}}
valid_data = {'foo': {'boo': {'doo': {'Key': 'Test'}}}}
props = properties.Properties(schema, valid_data)
self.assertIsNone(props.validate())
invalid_data = {'foo': {'boo': {'doo': {'Key': 'Test', 'bar': 'baz'}}}}
props = properties.Properties(schema, invalid_data)
ex = self.assertRaises(exception.StackValidationFailed,
props.validate)
self.assertEqual('Property error: foo.boo.doo: Unknown Property bar',
six.text_type(ex))
def test_schema_to_template_empty_schema(self):
schema = {}
(parameters,
props) = properties.Properties.schema_to_parameters_and_properties(
schema)
self.assertEqual({}, parameters)
self.assertEqual({}, props)
def test_update_allowed_and_immutable_contradict(self):
schema = {'foo': properties.Schema(
properties.Schema.STRING,
update_allowed=True,
immutable=True)}
props = properties.Properties(schema, {})
self.assertRaises(exception.InvalidSchemaError, props.validate)
| steveb/heat | heat/tests/test_properties.py | Python | apache-2.0 | 74,955 |
# Generated by Django 2.1.8 on 2019-04-12 01:18
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
import django.utils.timezone
import gwells.db_comments.model_mixins
import submissions.data_migrations
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='WellActivityCode',
fields=[
('create_user', models.CharField(max_length=60)),
('create_date', models.DateTimeField(default=django.utils.timezone.now)),
('update_user', models.CharField(max_length=60)),
('update_date', models.DateTimeField(default=django.utils.timezone.now)),
('display_order', models.PositiveIntegerField()),
('effective_date', models.DateTimeField(default=django.utils.timezone.now)),
('expiry_date', models.DateTimeField(default=datetime.datetime(9999, 12, 31, 23, 59, 59, 999999, tzinfo=utc))),
('code', models.CharField(db_column='well_activity_type_code', editable=False, max_length=10, primary_key=True, serialize=False)),
('description', models.CharField(max_length=100)),
],
options={
'db_table': 'well_activity_code',
'ordering': ['display_order', 'description'],
},
bases=(models.Model, gwells.db_comments.model_mixins.DBComments),
),
migrations.RunPython(
code=submissions.data_migrations.load_well_activity_codes,
reverse_code=submissions.data_migrations.unload_well_activity_codes,
),
]
| bcgov/gwells | app/backend/submissions/migrations/0001_initial.py | Python | apache-2.0 | 1,712 |
# -*- coding: utf-8; -*-
import sys
from Tkinter import *
import os
import symbols
import draft_gui
import calc
import get_conf
import get_object
import param_edit
import select_clone
import trace
import trace_object
import save_file
import undo_redo
import to_dxf
import to_svg
import from_dxf
import from_svg
import copy_prop
import trim_extend
import trim_dim
import fillet
import edit
import offset
import scale_object
import rotate_object
import mirror_object
import move_object
import copy_object
import grab_object
import print_ps
import line
import dimension
import text_line
import circle
import arc
import math
import time
import tkFileDialog
import tkMessageBox
from random import randint
import codecs
import copy
from shutil import copyfile
#font = 'Arial'
zoomm = 0.8
zoomp = 1.0/0.8
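# zoomm/zoomp are the zoom-out/zoom-in step factors (reciprocals of each other)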
class Graphics:
def __init__(self):
self.appPath = os.getcwd()
        # drawing variables
self.zoomOLDres = 0
self.ex = 0.0
self.ey = 0.0
self.ex2 = 0.0
self.ey2 = 0.0
self.ex3 = 0.0
self.ey3 = 0.0
        self.min_e = 0.00001 # minimum drawing extent
        # display variables
self.zoomOLD = 0
        self.sloy = '1' # current layer
        self.color = 'white' # current color
        self.width = 2 # current line width
        self.stipple = None
        self.stipple_size = 200 # dash-pattern scale
        self.select_color = 'green' # color of selected objects
        self.priv_color = 'red' # snap-marker color
self.fon_color = 'black'
self.left_color = 'light blue'
self.right_color = 'red'
        self.size_t = -500 # current text font size (5 mm)
        self.size_f = -350 # current dimension font size (3.5 mm)
        self.size_simbol_p = 10 # snap-marker size
        self.anchor = 'sw' # current text anchor
        self.font = 'Architectural'
        self.s_s = 1.2 # letter spacing
        self.w_text = 1 # letter width
        self.s = 50 # variable controlling the proportions of dimension elements
self.arrow_s = 200
self.vr_s = 200
self.vv_s = 200
self.type_arrow = 'Arch'
self.s_s_dim = 1.3
self.w_text_dim = 1
self.font_dim = 'Architectural'
self.snap_s = 10 #Определяет дальнобойность привязки (расстояние в точках на экране)
self.angle_s = 15.0
self.auto_save_step = 30 #Количество действий между автосохранениями
        #Line (dash) types
self.stipples = {
'_____________':None,
'_ _ _ _ _ _ _':(1,1),
'____ _ ____ _':(4,1,1,1),
'____ _ _ ____':(4,1,1,1,1,1),
}
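        # These are presumably Tkinter-style dash patterns: each tuple lists
        # alternating dash/gap segment lengths, scaled elsewhere by stipple_size.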
self.old_func = 'self.copyEvent()'
self.prog_version = 'SAMoCAD - v0.0.8.5 alpha'
self.old_text = self.prog_version
self.old_offset = 0
self.old_fillet_R = 0
self.old_scale = 1
self.old_print_scale = 100.0
self.Old_sel = None
        self.ortoFlag=False #True means ortho mode is on
        self.trace_on = False
        self.trace_obj_on = False
        self.tracingFlag = True
        self.tracing_obj_Flag = True
        self.snap_near = True
        self.lappingFlag = False #True while the selection rectangle is active
        self.resFlag = False #True while drawing
        self.anchorFlag = False #True while choosing a text anchor
        self.saveFlag = False
        self.changeFlag = False
        self.current_file = 'New draft'
self.s_dxf = False
self.curent_class = None
self.unpriv = False
self.edit_clone = False
self.move_clone = False
self.mirror_clone = False
self.rotate_clone = False
self.edit_dim_clone = False
self.copy_clone = False
self.line_clone = False
self.circle_clone = False
self.arc_clone = False
self.dim_clone = False
self.dimR_clone = False
self.edit_dim_text_clone = False
self.trim_dim_clone = False
self.enumerator = 0
        self.com=None #command-line value
        self.colorC = None #Remembers an object's color while the cursor is over it
        self.rect = None #Selection rectangle
        self.priv_coord = (0,0) #Current snap point
        self.x_priv = 0 #Snap coordinates
        self.y_priv = 0
        self.tip_p = '' #Snap type
        self.Ndimd = 0 #Number of dimensions
        self.Nlined = 0 #Number of lines
        self.Ncircled = 2 #Number of circles
        self.Ntextd = 0 #Number of text lines
        self.Narcd = 0 #Number of arcs
        self.Ncloned = 0
        self.Ndimrd = 0
        self.Ndim = ''
        self.Nline = ''
        self.Ntext = ''
        self.Ncircle = ''
        self.Narc = ''
        self.Ndimr = ''
        self.Nclone = ''
        self.func_collection = [] #Objects from the collection already handled by the current action
        self.collection = [] #Selected objects
        self.find_privs = [] #Set of snap primitives
        self.collectionBack = [] #Previously deselected set of objects
        self.temp_collection = []
        self.temp_lines_list = []
        self.ALLOBJECT = {} #ALL objects (object : {parameter : value})
        self.all_clone = {}
        self.history_undo = [] #Event history
        #self.history_redo = [] #Event history
    def initial(self, master1):#Creates the GUI
draft_gui.gui = draft_gui.Gui(master1, graf)
self.master1 = draft_gui.gui.master1
self.dialog = draft_gui.gui.dialog
self.command = draft_gui.gui.command
self.info = draft_gui.gui.info
self.button_orto = draft_gui.gui.button_orto
self.button_trace = draft_gui.gui.button_trace
self.button_trace_obj = draft_gui.gui.button_trace_obj
self.button_snap_N = draft_gui.gui.button_snap_N
self.frame1 = draft_gui.gui.frame1
self.c = draft_gui.gui.canvas
        #Coordinate origin
        self.nachCoordy = self.c.create_line(10,10,100,10,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
        #self.c.create_line(100,10,80,5,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
        #self.c.create_line(100,10,80,15,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
        #self.nachCoordx = self.c.create_line(10,10,10,100,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
        #self.c.create_line(10,100,5,80,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
        #self.c.create_line(10,100,15,80,fill='white',width=3,tags=['line', 'obj'], state = HIDDEN)
        #Intercept window close
        self.col = 0
        self.master1.protocol('WM_DELETE_WINDOW', self.exitMethod)
        #Events
self.master1.bind_class(self.c,"<MouseWheel>", self.Mzoommer)#Windows OS
self.master1.bind_class(self.c,'<Button-4>', self.Mzoommer)#Linux OS
self.master1.bind_class(self.c,'<Button-5>', self.Mzoommer)#Linux OS
self.c.bind_class(self.master1,"<B2-Motion>", self.mouseMove)
self.c.bind_class(self.master1,"<2>", self.OnMouseMove)
self.c.bind_class(self.c,"<Motion>", self.gpriv)
#self.c.tag_bind('t_LOD', '<Button-3>', self.editText)
#self.c.tag_bind('dim_text_priv', '<Button-3>', self.editDimTextPlace)
self.c.bind_class(self.master1, "<Control-Button-3>", self.BackCol)
self.c.bind('<Button-3>', self.edit_butt_3)
self.c.bind('<Button-1>', self.lapping_sel)
self.c.bind('<Shift-Button-1>', self.lapping_desel)
self.c.bind_class(self.master1, "<Delete>", self.delete)
self.c.bind_class(self.master1, "<Escape>", self.kill)
self.c.bind_class(self.master1, "<Return>", self.old_function)
        #Hotkeys
self.c.bind_class(self.master1, "<Control-KeyPress-x>", self.mirrorEvent)
self.c.bind_class(self.master1, "<Control-KeyPress-z>", self.copyEvent)
self.c.bind_class(self.master1, "<Control-KeyPress-a>", self.moveEvent)
self.c.bind_class(self.master1, "<Control-KeyPress-s>", self.rotateEvent)
self.c.bind_class(self.master1, "<Control-KeyPress-l>", self.ort)
self.c.bind_class(self.master1, "<Control-KeyPress-e>", self.tt)
self.c.bind_class(self.master1, "<Control-KeyPress-d>", self.copy_prop)
self.c.bind_class(self.master1, "<Control-KeyPress-q>", self.trimEvent)
self.c.bind_class(self.master1, "<Control-KeyPress-w>", self.extendEvent)
self.c.bind_class(self.master1, "<Control-KeyPress-r>", self.scaleEvent)
self.c.bind_class(self.master1, "<Control-KeyPress-p>", self.print_postScript)
self.c.bind_class(self.master1, "<Control-KeyPress-o>", self.fileOpen)
self.c.bind_class(self.master1, "<Control-KeyPress-n>", self.new)
self.c.bind_class(self.master1, "<Control-KeyPress-m>", self.trim_dim)
self.c.bind_class(self.master1, "<F1>", draft_gui.gui.obj_prop)
self.set_coord()
        j = 0 #Bring the zoom to its normal level (-20x)
while j < 20:
self.zoommerM()
j+=1
def tt(self, event):
for i in self.ALLOBJECT:
print ('______________________________')
print (i, self.ALLOBJECT[i])
#print self.ALLOBJECT
#print self.collection
#print self.ALLOBJECT
#print self.ALLOBJECT[self.collection[0]]['text_change']
#print self.temp_lines_list
#print '_______'
#print 'undo', self.history_undo
#print 'redo', self.history_redo
def undo(self, event = None):
self.kill()
if self.history_undo:
undo_redo.undo(self.history_undo[-1], graf)
'''
def redo(self, event = None):
self.kill()
if self.history_redo:
undo_redo.redo(self.history_redo[-1], graf)
'''
    #OBJECT EDITING
    def old_function(self, event):#Pressing Enter outside drawing mode re-runs the last used function
        exec(self.old_func)
    #CONTINUING A DIMENSION LINE
def trim_dim(self, event = None):
self.curent_class = trim_dim.Trim_dim(graf)
    #COPYING PROPERTIES
def copy_prop(self, event = None):
self.curent_class = copy_prop.Copy_prop(graf)
    #TRIMMING/EXTENDING LINES
def trimEvent(self, event = None):
self.trim_extend = 'Trim'
self.curent_class = trim_extend.Trim_extent(graf)
self.old_func = 'self.trimEvent()'
def extendEvent(self, event = None):
self.trim_extend = 'Extend'
self.curent_class = trim_extend.Trim_extent(graf)
self.old_func = 'self.extendEvent()'
    #UPDATING SELECTED OBJECTS' PARAMETERS WHEN A SETTINGS VALUE CHANGES
def param_edit(self, params):
param_edit.Param_edit(graf, params)
    #MOUSE BUTTON 3 EVENTS
    def edit_butt_3(self, event):
        el = get_object.get_obj(event.x, event.y, graf, ('dim', 'text'))
        if el:
            self.kill()
            #Take the coordinates from the snap coordinate list (computed by gpriv)
            self.ex = self.priv_coord[0]
            self.ey = self.priv_coord[1]
            if el[0] == 'd':
                self.editDimTextPlace(el)
            elif el[0] == 't':
                self.editText(el)
    #EDITING DIMENSION TEXT PLACEMENT
    def editDimTextPlace(self, el):
        if self.tip_p == 'c':
            self.ex = self.priv_coord[0]#Take the coordinates from the snap coordinate list (computed by gpriv)
self.ey = self.priv_coord[1]
self.ex3,self.ey3 = self.ex,self.ey
self.dialog.config(text = 'Move dim text - new point:')
self.info.config(text = 'Escape - stop')
self.resFlag = True
self.c.bind_class(self.master1,"<Return>", self.kill)
self.c.bind('<Button-1>', self.editDimTextPlace2)
self.c.unbind('<Button-3>')
self.c.unbind('<Shift-Button-1>')
self.set_coord()
self.collection.append(el,)
select_clone.Select_clone([el,], graf)
self.Old_sel = None
self.edit_dim_text_clone = True
def editDimTextPlace2(self, event = None):
x1, y1, x2, y2, x3, y3, ort, size, fill, text, sloy, text_change, text_place, s, vr_s, vv_s, arrow_s, type_arrow, s_s_dim, w_text_dim, font_dim = get_conf.get_dim_conf(self.collection[0], graf)
        self.ex2 = self.priv_coord[0]#Take the coordinates from the snap coordinate list (computed by gpriv)
self.ey2 = self.priv_coord[1]
self.ex,self.ey = self.coordinator(self.ex,self.ey)
s2 = self.coordinator2(s)
line3 = self.c.coords(self.get_snap_line(self.collection[0])[2])
if ort == 'vertical' and abs(self.ey2-y3) <= s2*2.0:
self.ey2 = y3-s
text_change = 'online3'
if x1<self.ex2<x2 or x2<self.ex2<x1:
text_change = 'online3_m_l'
elif ort == 'horizontal' and abs(self.ex2-x3) <= s2*2.0:
self.ex2 = x3+s
text_change = 'online3'
if y1<self.ey2<y2 or y2<self.ey2<y1:
text_change = 'online3_m_l'
else:
text_change = 'changed'
text_place = [self.ex2, self.ey2]
if event:
self.c.delete(self.collection[0])
dimension.c_dim(graf, x1, y1, x2, y2, x3, y3, text, sloy,
fill,
size,
ort,
text_change,
text_place,
s,
vv_s,
vr_s,
arrow_s,
type_arrow,
s_s_dim,
w_text_dim,
font_dim,
ID = self.collection[0])
self.changeFlag = True
self.kill()
else:
self.set_coord()
dimension.c_dim(graf, x1, y1, x2, y2, x3, y3, text, sloy,
fill,
size,
ort,
text_change,
text_place,
s,
vv_s,
vr_s,
arrow_s,
type_arrow,
s_s_dim,
w_text_dim,
font_dim,
temp = 'Yes')
self.ex3 = self.ex2
self.ey3 = self.ey2
    def dim_text_place(self, Num):#Takes a dimension object; returns its text primitives, the text snap line, and the snap coordinate
objs = self.ALLOBJECT[Num]['id']
text_lines = []
for i in objs:
tag = self.ALLOBJECT[Num]['id'][i]
if 'dim_text' in tag:
text_lines.append(i)
if 'dim_text_priv' in tag:
priv_line = i
text_p = self.c.coords(priv_line)
text_place = []
text_place1 = (text_p[0] + text_p[2]) / 2.0
text_place2 = (text_p[1] + text_p[3]) / 2.0
if text_place1 == text_p[0]:
text_place.append(text_p[0])
text_place.append(text_place2)
text_place.append('vert')
else:
text_place.append(text_place1)
text_place.append(text_p[1])
text_place.append('hor')
return text_lines, priv_line, text_place
    #EDITING TEXT
def editText(self, Num):
self.dialog.config(text = 'Edit text:')
self.info.config(text = 'Enter - apply. Escape - stop')
self.command.delete(0, END)
self.collection.append(Num)
select_clone.Select_clone([Num,], graf)
text = self.ALLOBJECT[self.collection[0]]['text']
self.command.insert(0, text)
self.c.bind_class(self.master1, "<Return>", self.editText2)
self.command.focus_set()
self.Old_sel = None
def editText2(self, event):
fill, text, sloy, angle, anchor, size, line, coord, s_s, w_text, font = self.get_text_conf(self.collection[0])
text = self.command.get()
self.delete(elements = (self.collection[0],))
text_line.c_text(graf, coord[0], coord[1], text = text, size = size, anchor = anchor, sloy = sloy, fill = fill, angle = angle, s_s = s_s, w_text = w_text, font = font)
self.collection = []
self.changeFlag = True
self.enumerator_p()
self.kill()
    #GETTING OBJECT PARAMETERS
def get_conf(self, obj):
return get_conf.get_conf(obj, graf)
def get_circle_conf(self, obj):
return get_conf.get_circle_conf(obj, graf)
def get_arc_conf(self, obj):
return get_conf.get_arc_conf(obj, graf)
def get_line_conf(self, obj):
return get_conf.get_line_conf(obj, graf)
def get_line_coord(self, obj):
return get_conf.get_line_coord(obj, graf)
def get_text_conf(self, obj):
return get_conf.get_text_conf(obj, graf)
def get_dim_conf(self, obj):
return get_conf.get_dim_conf(obj, graf)
def get_dimR_conf(self, obj):
return get_conf.get_dimR_conf(obj, graf)
    #EDITING NODES
    def editEvent(self, event):
        self.curent_class = edit.Edit_node(graf)
    #FILLET
    def filletEvent(self, event=None):
        self.curent_class = fillet.Fillet(graf)
    #GET THE PRIMITIVE NEAREST TO A POINT
    def get_obj(self, x, y, t_obj = 'line'):
        return get_object.get_obj(x, y, graf, t_obj)
    #OFFSET
    def offsetEvent(self, event=None):
        self.curent_class = offset.Offset(graf)
    #SCALING OBJECTS
    def scaleEvent(self, event=None):
        self.curent_class = scale_object.Scale_object(graf)
    #ROTATION
    def rotateEvent(self, event=None):
        self.curent_class = rotate_object.Rotate_object(graf)
    #MIRROR (not applied to complex objects containing text)
    def mirrorEvent(self, event=None):
        self.curent_class = mirror_object.Mirror_object(graf)
    #MOVE
    def moveEvent(self, event=None):
        self.curent_class = move_object.Move_object(graf)
    #COPY
    def copyEvent(self,event=None):
        self.curent_class = copy_object.Copy_object(graf)
    #SELECTION
    def lapping_sel(self,event):
        grab_object.lapping2(graf, select = 'select')
    #DESELECTION
def lapping_desel(self, event):
grab_object.lapping2(graf, select = 'deselect')
def resRect(self, event):
self.rectx2=event.x
self.recty2=event.y
self.priv_coord = (self.rectx2, self.recty2)
self.rectx,self.recty = self.coordinator(self.rectx,self.recty)
self.set_coord()
        if self.rectx2<self.rectx:#The color depends on the x direction
            color = self.left_color
        else:
            color = self.right_color
        if self.rect:
            self.c.coords(self.rect, self.rectx, self.recty, self.rectx2, self.recty2)
            self.c.itemconfig(self.rect, outline = color)
        else:
            self.rect=self.c.create_rectangle(self.rectx, self.recty, self.rectx2, self.recty2, fill=None,outline=color, tags=['line', 'obj', 'rect'])#Redraw at the new coordinates
def set_coord(self):
self.xynachres=self.c.coords(self.nachCoordy)
self.zoomOLDres = self.zoomOLD
    def delete(self, event=None, elements = None, add_history = None): #Delete objects
        def dele(i, h = None):#Removes the given object from the canvas and from ALLOBJECT
            if h:
                e = self.get_conf(i)
                self.e_list.append(e)
            self.c.delete(i)
            del self.ALLOBJECT[i]
            if ('c_', i) in self.history_undo:
                self.history_undo.remove(('c_', i))
        t1 = time.time()
        if elements == None:#If no elements to delete were specified
            self.set_coord()
            self.e_list = []
            map(lambda x: dele(x, h = 'add'), self.collection)#Iterate over the collection
            self.collection = []
            self.history_undo.append(('delete', (self.e_list, self.xynachres, self.zoomOLDres)))
            self.changeFlag = True
            self.enumerator_p()
            self.kill()
        else:#If elements to delete were specified
            map(dele, elements)
        t2 = time.time()
        print ('delete', t2-t1)
    def sbros(self):#Resets the collection - moves the list of selected objects into collectionBack.
        t1 = time.time()
        self.collectionBack = self.collection
        self.c.delete('clone')
        self.collection = []
        t2 = time.time()
        print ('sbros', t2-t1)
    def BackCol(self, event):#core-feature!!! - Restores the previous set into the collection
        if self.resFlag == False and (not self.collection):#If nothing is being drawn and the collection is empty
            def BC(i):
                if i in self.ALLOBJECT:#If the object is still in the global list (was not deleted)
                    self.collection.append(i)#Add it to the collection
                    print (111)
            map(BC, self.collectionBack)#Iterate over the old collection
            select_clone.Select_clone(self.collection, graf)
            self.colObj()#Count the selected objects
            draft_gui.gui.update_prop()
    def colObj(self):#Shows how many objects are selected
if self.collection:
self.info.config(text = ('Selected %s objects') %(len(self.collection)))
else:
self.info.config(text ='')
def back_color(self, color, obj):
if obj[0] in ['c', 'a']:
for i in self.ALLOBJECT[obj]['id']:
tag = self.ALLOBJECT[obj]['id'][i]
if 'line' in tag:
self.c.itemconfig(i, fill = color)
if 'cir' in tag or 'a' in tag:
self.c.itemconfig(i, outline = color)
else:
self.c.itemconfig(obj, fill = color)
def collektor_sel(self, event):
x = event.x
y = event.y
self.collektor(x, y, select = 'select')
def collektor_desel(self, event):
x = event.x
y = event.y
self.collektor(x, y, select = 'deselect')
    def collektor(self, x, y, select):#Adds to the collection the object whose primitives are active (under the cursor)
        #Get the object ID from the currently active primitive
        Num = get_object.get_obj(x, y, graf, 'all')
        #If Shift is not pressed
        if select == 'select':
            #If the object is not in the collection - add it and change its color
            if Num not in self.collection and Num in self.ALLOBJECT:
                self.collection.append(Num)
                select_clone.Select_clone((Num,), graf)
                self.Old_sel = None
        #If Shift is pressed
        else:
            #If the object is in the collection - remove it and restore its color
            if Num in self.collection:
                self.collection.remove(Num)
                self.c.delete('C'+Num)
                draft_gui.gui.update_prop()
        #Count the selected objects
        self.colObj()
    def mass_collektor(self, mass, select):#Adds objects from a mass of primitives to the collection
        t1 = time.time()
        old_col = self.collection
        if select == 'select':#If adding
            append_list = []#Stand-in for the collection
            gettags = self.c.gettags
            append = append_list.append
            for content in mass:
                Num = gettags(content)[1]#Get the object ID from the primitive
                if Num not in self.collection and Num not in append_list and Num[0] != 'C':#If the object is not in the collection - add it and change its color
                    append(Num)
            select_clone.Select_clone(append_list, graf)
            self.collection.extend(append_list)
        else: #If removing
            delete_list = []
            for content in mass:
                Num = self.c.gettags(content)[1]#Get the object ID from the primitive
                if Num in self.collection and Num not in delete_list and Num[0] != 'C':#If the object is in the collection - remove it and restore its color
                    #If the object is not yet in the delete list
                    delete_list.append(Num)
            #Iterate delete_list and remove all its objects from the collection
            for i in delete_list:
                self.collection.remove(i)
                self.c.delete('C'+i)
        if old_col != self.collection:
            draft_gui.gui.update_prop()
        t2 = time.time()
        print ('mass_collektor', t2-t1)
    def edit_collektor(self, edit_mass): #Adds objects from a mass of primitives to the collection; if the mass contains dimensions, the other objects do not get in
        prov = True #True until a dimension is encountered
        append_list = []
        for content in edit_mass:
            non_ap = False
            Num = self.c.gettags(content)[1]#Get the object ID from the primitive
            if Num not in append_list and Num[0] != 'C':
                if Num[0] in ('d', 'r'):
                    prov = False
                if Num[0] == 'r':
                    line1 = self.get_snap_line(Num)[0]
                    c = self.c.coords(line1) #get_conf.get_line_coord(line1, graf)#
                    xc = c[0]
                    yc = c[1]
                    if (xc, yc) == (self.ex, self.ey):
                        non_ap = True
                elif Num[0] == 'c':
                    x0, y0, R, fill, width, sloy = get_conf.get_circle_conf(Num, graf)
                    if (x0, y0) == (self.ex, self.ey):
                        non_ap = True
                elif Num[0] == 'a':
                    xc, yc, dx1, dy1, dx2, dy2, fill, width, sloy = get_conf.get_arc_conf(Num, graf)
                    if (xc, yc) == (self.ex, self.ey):
                        non_ap = True
                if non_ap == False:
                    append_list.append(Num)
        select_clone.Select_clone(append_list, graf)
        if self.Old_sel in append_list:
            self.Old_sel = None
        self.collection.extend(append_list)
        if self.tip_p == 'c' and prov == True and len(self.collection)==1:#If there is one object, it is a line, and the snap is to its midpoint
            return 'line_c'#Enables Move mode
        else:
            return 'another'#Enables Edit mode
    def edit_c(self, edit_mass): #Checks what is in the collection - if only dimensions along one line, leaves the collection unchanged; if there are other objects, keeps only those with a current snap
        delete_list = []#Objects from the collection that have no snap
        dim_list = []#Dimensions from the collection
        line_dim_edit = True#Stays True until a non-dimension is encountered
        for content in edit_mass:#Iterate over the incoming collection
            if content[0] == 'd':#If the object is a dimension
                dim_list.append(content)#Add it to the dimension list
            else:
                line_dim_edit = False#Otherwise a non-dimension was found
            undel_obj = False#If it stays False - drop the object from the collection
            find = self.ALLOBJECT[content]['id']#self.c.find_withtag(content)#Get the object's primitives
            for i in find:#Iterate over them
                if i in self.find_privs2:#If the primitive is in the list of snap primitives
                    undel_obj = True#Keep the object in the collection
            if undel_obj == False:#If it stayed False
                delete_list.append(content)#Add the object to the delete list
                self.c.delete('C'+content)
        map(lambda i: self.collection.remove(i), delete_list)#Iterate delete_list and remove all its objects from the collection
        #core-feature!!! - determines whether all the dimensions lie along one line. If so, the whole dimension chain can be moved
        if line_dim_edit == True:#If no non-dimension was encountered
            if len(dim_list) > 1:#If there is more than one dimension
                line3_list = []#First coordinates of the dimensions' dimension lines
                ort1 = None#orientation of the first dimension
                ort2 = None#the same for the second
                bFlag = False#If False, all dimension lines share one coordinate (x or y) and lie along one line
                for i in dim_list:# Iterate over the dimension list
                    if dim_list.index(i) == 0: #If this is the first dimension in the list
                        ort1 = self.ALLOBJECT[i]['ort']#Store its orientation in the first variable
                    else:
                        ort2 = self.ALLOBJECT[i]['ort']#Otherwise in the second
                    if ort1 != ort2:#If they differ - bail out and leave the collection as is
                        bFlag = True
                        break
                    line3 = self.get_snap_line(i)[2]#Take the dimension line of the dimension
                    coord = self.c.coords(line3)#Take the dimension line's coordinates
                    line3_list.append(coord[0:2])#Keep only its first 2 coordinates
                if bFlag == False:#If no bail-out happened
                    for ind, i in enumerate(line3_list):#Iterate over the coordinate list
                        if ort1 == 'vertical':#If the orientation is vertical
                            if i == line3_list[-1]:#If this is the last element in the list
                                ii = -1#take the previous one as the second element
                            else:
                                ii = 1#otherwise the next one
                            if i[1] != line3_list[ind + ii][1]:#If the y of the second differs from the y of the first - bail out and leave the collection as is
                                bFlag = True
                                break
                        else:
                            if i == line3_list[-1]:
                                ii = -1
                            else:
                                ii = 1
                            if i[0] != line3_list[ind + ii][0]:#If the x of the second differs from the x of the first - bail out and leave the collection as is
                                bFlag = True
                                break
                    if bFlag == False:#If there was still no bail-out
                        self.collection = dim_list#The collection becomes the dimension list
                        for i in self.collection:#Recolor the dimensions
                            self.c.delete('C'+i)
                        select_clone.Select_clone(self.collection, graf)
    def colorer(self, event):#action when the cursor moves onto a primitive
        Num = self.get_obj(event.x, event.y, 'all')
        if Num not in self.collection and Num in self.ALLOBJECT and Num not in ('trace', 'trace_o'):#If the object is not in the collection - change its color and set the flag
            select_clone.Select_clone((Num,), graf)
        if self.resFlag == False:#If nothing is being drawn - disable the lapping actions
            self.c.unbind('<Button-1>')
            self.c.unbind('<Shift-Button-1>')
    def colorerL(self, event=None):#action when the cursor leaves a primitive
        Num = self.get_obj(event.x, event.y, 'all')
        if Num not in self.collection and Num in self.ALLOBJECT:#If the object is not in the collection, restore its color
            if Num in self.ALLOBJECT:
                self.c.delete(self.all_clone['C'+Num])
                del self.all_clone['C'+Num]
        if self.resFlag == False:
            self.c.bind('<Button-1>', self.lapping_sel)
            self.c.bind('<Shift-Button-1>', self.lapping_desel)
    def m_coordinator(self, arg, zoomOLDres): #Converts distances recorded at zoomOLDres into distances at the current zoom
if self.zoomOLD != zoomOLDres:
r = -self.zoomOLD+zoomOLDres
if self.zoomOLD>zoomOLDres:
arg *= (zoomm**r)
else:
arg *= zoomp**(-r)
return arg
    def n_coordinator(self, arg): #Converts distances at the current zoom into real distances
if self.zoomOLD>0:
arg = arg*zoomm**self.zoomOLD
else:
zoomOLDx = self.zoomOLD*(-1)
arg = arg*zoomp**zoomOLDx
return arg
    def coordinator(self,x,y,zoomOLDres = None, xynachres = None):#Recomputes coordinates if the zoom changed or the view was panned
xynach=self.c.coords(self.nachCoordy)
if zoomOLDres == None:
zoomOLDres = self.zoomOLDres
xynachres = self.xynachres
if self.zoomOLD == zoomOLDres:
dx=xynach[0]-xynachres[0]
dy=xynach[1]-xynachres[1]
else:
r=-self.zoomOLD+zoomOLDres
if self.zoomOLD>zoomOLDres:
x *= zoomm**r
y *= zoomm**r
dx = xynach[0] - xynachres[0] * zoomm**r
dy = xynach[1] - xynachres[1] * zoomm**r
else:
x *= zoomp**(-r)
y *= zoomp**(-r)
dx = xynach[0] - xynachres[0] * zoomp**(-r)
dy = xynach[1] - xynachres[1] * zoomp**(-r)
x = dx + x
y = dy + y
return x,y
    def coordinator2(self,arg):#Converts real distances into distances at the current zoom
if self.zoomOLD>0:
arg *= zoomp**self.zoomOLD
else:
zoomOLDx = self.zoomOLD*(-1)
arg /= zoomp**zoomOLDx
return arg
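    # e.g. with zoomOLD == 2 (zoomed in twice) a real distance of 100 maps to
    # 100*1.25**2 == 156.25 screen units; with zoomOLD == -1 it maps to 100/1.25 == 80.0.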
def standart_unbind(self):
self.resFlag = True
self.c.bind_class(self.master1,"<Return>", self.kill)
self.c.unbind('<Button-3>')
self.c.unbind_class(self.master1, "<Control-Button-3>")
self.c.unbind('<Button-1>')
self.c.unbind('<Shift-Button-1>')
self.c.unbind_class(self.master1, "<Delete>")
    def kill(self, event=None):#Returns everything to its initial state
if self.rect:
self.c.delete(self.rect)
self.rect = None
if self.col:
#fill = self.ALLOBJECT[self.col]['fill']
#self.back_color(fill, self.col)
self.c.delete('C'+self.col)
self.col = None
if self.curent_class:
del self.curent_class
self.curent_class = None
t=self.c.find_withtag('c1')
if t:
self.c.delete('c1')
if 'trace' in self.ALLOBJECT:
self.c.delete('trace')
del self.ALLOBJECT['trace']
if 'trace_o' in self.ALLOBJECT:
self.c.delete('trace_o')
del self.ALLOBJECT['trace_o']
self.c.delete('clone')
self.c.delete('temp')
self.unpriv = False
self.edit_clone = False
self.move_clone = False
self.mirror_clone = False
self.rotate_clone = False
self.edit_dim_clone = False
self.copy_clone = False
self.line_clone = False
self.circle_clone = False
self.arc_clone = False
self.dim_clone = False
self.dimR_clone = False
self.trim_dim_clone = False
self.edit_dim_text_clone = False
self.c.bind_class(self.c,"<Motion>", self.gpriv)
self.c.bind_class(self.master1, "<Control-Button-3>", self.BackCol)
self.c.bind('<Button-1>', self.lapping_sel)
self.c.bind('<Shift-Button-1>', self.lapping_desel)
self.c.bind('<Button-3>', self.edit_butt_3)
self.c.bind_class(self.master1, "<Return>", self.old_function)
self.c.bind_class(self.master1, "<Delete>", self.delete)
self.c.unbind_class(self.c,"<Shift-1>")
self.c.unbind_class(self.master1, "<Motion>")
self.c.unbind_class(self.c, "<End>")
self.dialog.config(text = 'Command:')
self.info.config(text = '')
self.resFlag = False
self.lappingFlag = False
self.anchorFlag = False
self.trace_on = False
self.trace_obj_on = False
self.command.delete(0,END)
self.com = None
self.sbros()
self.func_collection = []
self.temp_collection = []
self.c.config(cursor = 'crosshair')
draft_gui.gui.update_prop()
    def comY_N(self, default):#Checks the user's Yes/No answer; if there is none, returns the default
com=self.command.get()
if com:
com = str.upper(com)
if com in ['N', 'Y']:
default = com
else:
default = 'unknow'
self.command.delete(0,END)
else:
default = default
return default
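    # e.g. typing 'y' returns 'Y', 'n' returns 'N', empty input returns the given
    # default, and any other input returns 'unknow'.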
    def comOrKill(self, event=None):#Takes the value from the command line if the user typed a number, then clears the command line
        com=self.command.get()
        try:
            com = float(com)
        except ValueError:
            self.info.config(text = 'Unknown command')
            self.com = None
        else:
            self.com = com
    def commer(self,x1,y1,x2,y2): #Recomputes the coordinates when self.com is set
self.comOrKill()
if self.com:
self.com = self.coordinator2(self.com)
dx=x1-x2
dy=y1-y2
if self.ortoFlag == False and x1 != x2 and y1 != y2:
dx0=math.sqrt((self.com*self.com * dx*dx)/(dy*dy + dx*dx))
dy0=dx0*dy/dx
i=1
if x1<x2:
i=-1
x2=x1-i*dx0
y2=y1-i*dy0
else:
x2,y2=self.orto(x1,y1,x2,y2)
x2,y2=self.ortoRes(x1,y1,x2,y2)
return x2,y2
    def gpriv(self,event=None, x=None, y = None, f = None):
        t=self.c.find_withtag('c1')#Find the snap marker
        if t:#If there is one
            self.c.delete('c1')#Delete the marker
            if self.resFlag == False:#If drawing mode is off
                self.c.bind('<Button-1>', self.lapping_sel)
                self.c.bind('<Shift-Button-1>', self.lapping_desel)
            self.find_privs = ['t'] #Clear the snap list
        #if event:#If the method was triggered by an event
        self.find_privs = ['t']#List of snap primitives (with a separator)
        #self.c.unbind_class(self.master1, "<End>")#Disable the End key - snap cycling
        x=event.x#Get the cursor position
        y=event.y
        if not self.unpriv:
            self.x_priv, self.y_priv, self.tip_p = self.priv(x,y)#Check whether the cursor position allows snapping to a primitive
            p = self.tip_p #Snap type
            self.priv_coord = (self.x_priv, self.y_priv)#Set the snap coordinate tuple
            if x!=self.x_priv or y!=self.y_priv or p != self.tip_p: #If the cursor coordinates differ from the snap coordinates or the snap type changed
                self.tip_p = p #Reassign the snap type
                x1 = self.x_priv
                y1 = self.y_priv
                r=self.size_simbol_p
                if p == 'r':#If snapping to an endpoint
                    self.c.create_oval(x1-r,y1-r,x1+r,y1+r, outline = self.priv_color,width = 3, fill = None, tags = 'c1')#Draw the snap marker - a circle
                elif p == 'c':#If snapping to a midpoint, draw the snap marker - a triangle
                    self.c.create_line(x1-r,y1-r,x1+r,y1-r,fill=self.priv_color,width=3,tags='c1')
                    self.c.create_line(x1-r,y1-r,x1,y1+r,fill=self.priv_color,width=3,tags='c1')
                    self.c.create_line(x1,y1+r,x1+r,y1-r,fill=self.priv_color,width=3,tags='c1')
                elif p == 'X': #If snapping to an intersection - draw an X
                    self.c.create_line(x1-r,y1-r,x1+r,y1+r,fill=self.priv_color,width=3,tags='c1')
                    self.c.create_line(x1+r,y1-r,x1-r,y1+r,fill=self.priv_color,width=3,tags='c1')
                elif p == 'N': #If snapping to the nearest point - draw an N
                    self.c.create_line(x1-r,y1-r,x1+r,y1+r,fill=self.priv_color,width=3,tags='c1')
                    self.c.create_line(x1+r,y1-r,x1-r,y1+r,fill=self.priv_color,width=3,tags='c1')
                    self.c.create_line(x1-r,y1-r,x1-r,y1+r,fill=self.priv_color,width=3,tags='c1')
                    self.c.create_line(x1+r,y1-r,x1+r,y1+r,fill=self.priv_color,width=3,tags='c1')
                if self.resFlag == False:#If drawing mode is off
                    #self.c.tag_unbind('sel', "<Button-1>")#Disable selection
                    self.c.bind('<Button-1>', self.editEvent)#Enable node editing
else:
if not self.rect:
el = get_object.get_obj(x, y, graf, 'all')
if el and el not in ['trace', 'trace_o']:
if el == self.Old_sel:
pass
elif el != self.Old_sel and self.resFlag == False:
if self.Old_sel:
self.c.delete('C'+self.Old_sel)
self.Old_sel = None
                        if el not in self.collection and el in self.ALLOBJECT:#If the object is not in the collection - change its color and set the flag
select_clone.Select_clone((el,), graf)
self.Old_sel = el
                    #if self.resFlag == False:#If nothing is being drawn - disable the lapping actions
self.c.bind('<Button-1>', self.collektor_sel)
self.c.bind('<Shift-Button-1>', self.collektor_desel)
else:
if self.Old_sel:
self.c.delete('C'+self.Old_sel)
self.Old_sel = None
if self.resFlag == False:
self.c.bind('<Button-1>', self.lapping_sel)
self.c.bind('<Shift-Button-1>', self.lapping_desel)
if any((self.edit_clone, self.move_clone, self.copy_clone, self.mirror_clone, self.rotate_clone, self.edit_dim_clone, self.line_clone, self.circle_clone, self.arc_clone, self.dim_clone, self.edit_dim_text_clone, self.dimR_clone, self.trim_dim_clone)):
if len(self.collection) < 100:
self.c.delete('temp')
if self.edit_clone:
self.curent_class.editEvent2()
elif self.move_clone:
self.curent_class.moveEvent3()
elif self.copy_clone:
self.curent_class.copyEvent3()
elif self.mirror_clone:
self.curent_class.mirrorEvent4()
elif self.rotate_clone:
self.curent_class.rotateEvent5()
elif self.line_clone:
self.curent_class.line2()
elif self.circle_clone:
self.curent_class.circle2()
elif self.arc_clone:
self.curent_class.arc3()
elif self.dim_clone:
self.curent_class.risDim4()
elif self.edit_dim_text_clone:
self.editDimTextPlace2()
elif self.dimR_clone:
self.curent_class.risDimR3()
elif self.trim_dim_clone:
self.curent_class.dim_conf()
if self.trace_on:
trace.tracer(graf, self.trace_x1, self.trace_y1, self.trace_x2, self.trace_y2, self.snap_s, self.angle_s)
if self.trace_obj_on:
trace_object.tracer_obj(graf, self.priv_coord[0], self.priv_coord[1], self.snap_s)
else:
self.x_priv, self.y_priv = x, y
self.priv_coord = (self.x_priv, self.y_priv)
    def priv(self, x, y, f = None):#Takes point coordinates and optionally a primitive; returns the snap point coordinates if a snap applies, otherwise returns the incoming coordinates unchanged
        if f == None:#If no primitive was given
            find = list(self.c.find_overlapping(x-self.snap_s,y-self.snap_s,x+self.snap_s,y+self.snap_s))#Find all primitives inside the square around the point
            if self.rect:
                try:
                    find.remove(self.rect)
                except ValueError:
                    pass
        else:
            find = [f]#Otherwise wrap the given primitive in a list
        tip_p = None
        stopFlag = False
        xi=x#Default the returned coordinates to the incoming ones
        yi=y
        priv_coord_list = [] #Coordinates of primitives tagged as snappable
        ### Snap to a single primitive ###
        for i in find:#Iterate over the primitive list
            obj_tags = self.c.gettags(i)
            t = obj_tags[1]
            if t[0] == 'C' or 'temp' in obj_tags or 'text' in obj_tags:
                continue
            tags = self.ALLOBJECT[t]['id'][i]
            if 'priv' in tags and 'line' in tags:#If the primitive has a snap tag
                xy = self.c.coords(i)#Take the primitive's coordinates
                priv_coord_list.append((xy,'line'))#Add the primitive's coordinates to the list
                ay1 = abs(y-xy[1])#Differences between the primitive's points and the incoming (cursor) coordinates
                ay2 = abs(y-xy[3])
                ax1 = abs(x-xy[0])
                ax2 = abs(x-xy[2])
                if ax1<=ax2 and ax1<=self.snap_s and ay1<=self.snap_s: #or ay2<=self.snap_s):#If the x difference at the first point is smaller than at the second and within self.snap_s
                    if ay1<=ay2 and ay1<self.snap_s:#If the y difference at the first point is smaller than at the second and within self.snap_s
                        yt=xy[1]#Use the primitive's first point as the current coordinates
                        xt=xy[0]
                        tip_p = 'r'#Snap type - endpoint
                        self.find_privs.append(i)#Add the primitive to the snap list
                        if stopFlag == False:#If no snap point was found earlier
                            xi = xt#Return this point's coordinates
                            yi = yt
                            stopFlag = True#Stop reassigning the returned coordinates
                elif ax1>=ax2 and ax2<=self.snap_s and ay2<=self.snap_s:#(ay1<=self.snap_s or ay2<=self.snap_s):#If the x difference at the second point is smaller than at the first and within self.snap_s
if ay1>=ay2 and ay2<=self.snap_s:
yt=xy[3]
xt=xy[2]
tip_p = 'r'
self.find_privs.append(i)
if stopFlag == False:
xi = xt
yi = yt
stopFlag = True
                else:#If neither endpoint matched - snap to the midpoint
y0=xy[1]-((xy[1]-xy[3])/2.0)
x0=xy[0]-((xy[0]-xy[2])/2.0)
if abs(x-x0)<=self.snap_s and abs(y-y0)<=self.snap_s:
yt=y0
xt=x0
tip_p = 'c'
self.find_privs.append(i)
if stopFlag == False:
xi = xt
yi = yt
stopFlag = True
if 'temp' in tags or 'cir_centr' in tags or 'a_centr' in tags:
tip_p = None
stopFlag = False
xi=x
yi=y
elif 'priv' in tags and 'cir' in tags:
xy = self.c.coords(i)
priv_coord_list.append((xy,'cir'))
xc,yc,R = self.coord_circle(xy[0],xy[1],xy[2],xy[3])
if abs(x - xc)<=self.snap_s:
if abs(yc-R - y) <= self.snap_s:
xi = xc
yi = yc-R
tip_p = 'r'
stopFlag = True
self.find_privs.append(i)
elif abs(yc+R - y) <= self.snap_s:
xi = xc
yi = yc+R
tip_p = 'r'
stopFlag = True
self.find_privs.append(i)
elif abs(y - yc)<=self.snap_s:
if abs(xc-R - x) <= self.snap_s:
xi = xc-R
yi = yc
tip_p = 'r'
stopFlag = True
self.find_privs.append(i)
elif abs(xc+R - x) <= self.snap_s:
xi = xc+R
yi = yc
tip_p = 'r'
stopFlag = True
self.find_privs.append(i)
elif 'priv' in tags and 'a' in tags:
xy = self.c.coords(i)
start = float(self.c.itemcget(i, 'start'))
extent = float(self.c.itemcget(i, 'extent'))
priv_coord_list.append((xy,'a'))
xc, yc, dx1, dy1, dx2, dy2 = get_conf.get_arc_coord(xy[0],xy[1],xy[2],xy[3], start, extent)
R = (xy[2]-xy[0])/2.0
if abs(x - dx1)<=self.snap_s:
if abs(y - dy1)<=self.snap_s:
xi = dx1
yi = dy1
tip_p = 'r'
stopFlag = True
self.find_privs.append(i)
elif abs(x - dx2)<=self.snap_s:
if abs(y - dy2)<=self.snap_s:
xi = dx2
yi = dy2
tip_p = 'r'
stopFlag = True
self.find_privs.append(i)
        if stopFlag == False and self.snap_near == True and self.resFlag == True and priv_coord_list:#Snap to the nearest point on a line - if none of the previous options matched
for i in priv_coord_list:
xy = priv_coord_list[priv_coord_list.index(i)][0]
if i[1] == 'line':
xt, yt = calc.min_distanse(xy[0],xy[1],xy[2],xy[3], x,y)
if xt:
xi = xt
yi = yt
tip_p = 'N'
break
else:
xc,yc,R = self.coord_circle(xy[0],xy[1],xy[2],xy[3])
if i[1] == 'a':
xt,yt, d = calc.min_distanse_cir(xc, yc, R, x, y)
if d<=self.snap_s:
                    xi = xt#Assign the computed coordinates to the output
yi = yt
tip_p = 'N'
break
elif i[1] == 'cir':
xt,yt, d = calc.min_distanse_cir(xc, yc, R, x, y)
if d<=self.snap_s:
                            xi = xt#Assign the computed coordinates to the output
yi = yt
tip_p = 'N'
break
        ### Snap to two primitives ###
        if len(priv_coord_list) > 1 and stopFlag == False:#Snap to an intersection
            for i in priv_coord_list:#Iterate over the coordinate list
                ind = priv_coord_list.index(i)#Index of the current element
                if ind == 0:#If it is the first element - check the intersection with the next one
                    ii = 1
                else:#Otherwise with the previous one
                    ii = -1
                r = priv_coord_list[ind+ii]
                if i[1] == 'line' and r[1] == 'line':
                    xt,yt = calc.intersection_l_l(i[0][0],i[0][1],i[0][2],i[0][3],r[0][0],r[0][1],r[0][2],r[0][3])#Check whether an intersection point exists and compute it
                    if xt != None:#If the point exists
                        if (abs(y-yt)<=self.snap_s) and (abs(x-xt)<=self.snap_s):#If the coordinate difference is within self.snap_s
                            if (xt != i[0][0] or yt != i[0][1]) and (xt != i[0][2] or yt != i[0][3]):#If this point does not coincide with an endpoint
                                if (xt != r[0][0] or yt != r[0][1]) and (xt != r[0][2] or yt != r[0][3]):
                                    xi = xt#Assign the computed coordinates to the output
                                    yi = yt
                                    tip_p = 'X'#Snap type - intersection
break
elif (i[1] == 'line' and r[1] in ['cir', 'a']) or (i[1] in ['cir', 'a'] and r[1] == 'line'):
if i[1] == 'line':
line = i
circle = r
else:
line = r
circle = i
xc,yc,R = self.coord_circle(circle[0][0],circle[0][1],circle[0][2],circle[0][3])
xt,yt = calc.intersection_l_c(xc, yc, R, line[0][0], line[0][1], line[0][2], line[0][3], x, y)
                    if xt != None:#If the point exists
                        if (abs(y-yt)<=self.snap_s) and (abs(x-xt)<=self.snap_s):#If the coordinate difference is within self.snap_s
                            xi = xt#Assign the computed coordinates to the output
                            yi = yt
                            tip_p = 'X'#Snap type - intersection
break
elif i[1] in ['cir', 'a'] and r[1] in ['cir', 'a']:
xc1,yc1,R1 = self.coord_circle(i[0][0],i[0][1],i[0][2],i[0][3])
xc2,yc2,R2 = self.coord_circle(r[0][0],r[0][1],r[0][2],r[0][3])
xt, yt = calc.intersection_c_c(xc1, yc1, R1, xc2, yc2, R2, x, y)
                    if xt != None:#If the point exists
                        if (abs(y-yt)<=self.snap_s) and (abs(x-xt)<=self.snap_s):#If the coordinate difference is within self.snap_s
                            xi = xt#Assign the computed coordinates to the output
                            yi = yt
                            tip_p = 'X'#Snap type - intersection
break
        if f == None: #If no primitive list was given - enable snap cycling
            self.perebor_priv()
        return xi,yi,tip_p #Return the snap coordinates and the snap type
def coord_circle(self, x1,y1,x2,y2):
xc = (x1+x2)/2.0
yc = (y1+y2)/2.0
R = (x2-x1)/2.0
return xc, yc, R
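    # e.g. coord_circle(0, 0, 10, 10) -> (5.0, 5.0, 5.0): the centre and radius of
    # the circle inscribed in the bounding box (x1, y1, x2, y2).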
def perebor_priv(self):
if len(self.find_privs)>2:
self.c.bind_class(self.master1, "<End>", self.end_priv)
    def end_priv(self, event):#Cycles through the snap types when there are several options
t_index = self.find_privs.index('t')
if len(self.find_privs) == t_index+1:
self.gpriv(x = self.x_priv, y = self.y_priv, f = self.find_privs[0])
self.find_privs.remove('t')
self.find_privs.insert(1, 't')
else:
self.gpriv(x = self.x_priv, y = self.y_priv, f = self.find_privs[t_index+1])
self.find_privs.remove('t')
self.find_privs.insert(t_index+1, 't')
def ort(self, event=None, color_only = None):
if not color_only:
if self.ortoFlag == True:
self.ortoFlag = False
self.button_orto.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
else:
self.ortoFlag=True
self.button_orto.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
else:
if self.ortoFlag == False:
self.button_orto.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
else:
self.button_orto.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
def trac(self, event=None, color_only = None):
if 'trace' in self.ALLOBJECT:
self.c.delete('trace')
del self.ALLOBJECT['trace']
if 'trace_o' in self.ALLOBJECT:
self.c.delete('trace_o')
del self.ALLOBJECT['trace_o']
if not color_only:
if self.tracingFlag == True:
self.tracingFlag = False
self.trace_on = False
self.button_trace.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
else:
self.tracingFlag=True
self.button_trace.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
else:
if self.tracingFlag == False:
self.button_trace.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
else:
self.button_trace.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
def trac_obj(self, event=None, color_only = None):
if 'trace_o' in self.ALLOBJECT:
self.c.delete('trace_o')
del self.ALLOBJECT['trace_o']
if not color_only:
if self.tracing_obj_Flag == True:
self.tracing_obj_Flag = False
self.trace_obj_on = False
self.button_trace_obj.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
else:
self.tracing_obj_Flag=True
self.button_trace_obj.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
else:
if self.tracing_obj_Flag == False:
self.button_trace_obj.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
else:
self.button_trace_obj.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
def snap_n(self, event = None, color_only = None):
if not color_only:
if self.snap_near == True:
self.snap_near = False
self.button_snap_N.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
else:
self.snap_near=True
self.button_snap_N.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
else:
if self.snap_near == False:
self.button_snap_N.config(bg='white',fg='black', activebackground = 'white', activeforeground = 'black')
else:
self.button_snap_N.config(bg='blue',fg='red', activebackground = 'blue', activeforeground = 'red')
def orto(self,x1,y1,x2,y2):
if abs(x2-x1)>abs(y2-y1):
y2=y1
else:
x2=x1
return x2,y2
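    # e.g. orto(0, 0, 10, 3) -> (10, 0): the segment is snapped onto the axis
    # along which it already extends further.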
def ortoRes(self,x1,y1,x2,y2):
i=1
if x2==x1:
if y1>y2:
i=-1
y2=y1+i*self.com
else:
if x1>x2:
i=-1
x2=x1+i*self.com
return x2,y2
    #exiting the editor
def exitMethod(self):
self.save_change()
e = self.donate()
if e != 3:
self.master1.destroy()
#please, donate!
def d(self):
eroot = Toplevel()
eroot.title('Donate adress')
self.don = PhotoImage(file = os.path.join(self.appPath, 'res', 'don.gif'))
val = '5213 2437 3660 6532'
val2 = '1Kgect6s92fhRftHeuLVqgPJ1FYt7Lhee9'
val3 = '[email protected]'
l_donate = Text(eroot, relief = 'flat', height = 1, width = len(val), bg = 'light gray')
l_donate11 = Label(eroot, text = 'Bank card:')
l_donate12 = Label(eroot, text = 'Bitcoin adress:')
l_donate13 = Label(eroot, text = 'PayPal account:')
l_donate2 = Text(eroot, relief = 'flat', height = 1, width = len(val2), bg = 'light gray')
l_donate3 = Text(eroot, relief = 'flat', height = 1, width = len(val3), bg = 'light gray')
l_donate.insert(END, val)
l_donate2.insert(END, val2)
l_donate3.insert(END, val3)
l_donate.configure(state = DISABLED)
l_donate2.configure(state = DISABLED)
l_donate3.configure(state = DISABLED)
l_donate11.grid(row=0, column = 0, sticky = 'w', padx = 3, pady = 3)
l_donate.grid(row=0, column = 1, sticky = 'w', padx = 3, pady = 3)
l_donate12.grid(row=1, column = 0, sticky = 'w', padx = 2, pady = 3)
l_donate2.grid(row=1, column = 1, sticky = 'w', padx = 3, pady = 3)
l_donate13.grid(row=2, column = 0, sticky = 'w', padx = 2, pady = 3)
l_donate3.grid(row=2, column = 1, sticky = 'w', padx = 3, pady = 3)
but = Button(eroot, text = 'OK', command = eroot.destroy)
but.grid(row=3, column = 1, sticky = 'e', padx = 10, pady = 10)
def donate(self):
e = randint(2, 5)
if e == 3:
eroot = Toplevel()
eroot.title('Please, donate!')
self.don = PhotoImage(file = os.path.join(self.appPath, 'res', 'don.gif'))
eroot.tk.call('wm', 'iconphoto', eroot._w, self.don)
eroot.resizable(width=FALSE, height=FALSE)
from locale import getdefaultlocale
lang = getdefaultlocale()
if lang[0][0:2] != 'ru':
                donate_text = '''
            SAMoCAD - open source program,
            so developers want to eat.
            You can help the project.
            '''
feed = 'Feed :-)'
away = 'Get away from me!'
else:
donate_text = '''
SAMoCAD - бесплатная програма,
поэтому разработчики хотят кушать.
Вы можете помочь проекту.
'''
feed = 'Накормить'
away = 'Отстаньте от меня!'
l_donate = Label(eroot, justify = LEFT, text = donate_text)
self.imag = PhotoImage(file = os.path.join(self.appPath, 'res', 'icon3.gif'))
but = Button(eroot, text = feed, command = self.d)
but2 = Button(eroot, text = away, command = self.master1.destroy)
ca = Canvas(eroot, width = 100, height = 100)
ca.create_image(0,0,anchor=NW,image = self.imag)
ca.grid(row=0, column = 0, rowspan = 2, padx = 5, pady = 5)
l_donate.grid(row=0, column = 1,columnspan = 2, padx = 10, pady = 10)
but.grid(row=1, column = 1, padx = 10, pady = 10)
but2.grid(row=1, column = 2, padx = 10, pady = 10)
return e
    #DRAWING OBJECTS - EVENTS
    #BUILDING TEMPORARY LINES
def temp_lines(self, event):
self.oldinfo = self.info.cget('text')
self.info.config(text = (self.oldinfo + ' Create temp lines - line 2'))
Num = self.c.gettags(self.c.find_withtag('current'))[1]
self.temp_collection.append(Num)
#self.c.tag_bind('Line', '<Button-3>', self.temp_lines2)
def temp_lines2(self, event):
self.info.config(text = self.oldinfo)
Num = self.c.gettags(self.c.find_withtag('current'))[1]
self.temp_collection.append(Num)
stopFlag = False
if len(self.temp_collection) > 1:
for i in self.temp_collection:
if i not in self.ALLOBJECT:
stopFlag = True
if stopFlag == False:
c = map(lambda i: self.c.coords(self.c.find_withtag(i)[0]), self.temp_collection)
x, y = calc.intersection_stright(c[0][0],c[0][1],c[0][2],c[0][3],c[1][0],c[1][1],c[1][2],c[1][3])
if x != None:
self.c_line(x-5,y-5,x+5,y+5,fill='gray',width=1,sloy = 'temp', tip = 'temp')
self.temp_lines_list.append(self.Nline)
self.c_line(x+5,y-5,x-5,y+5,fill='gray',width=1,sloy = 'temp', tip = 'temp')
self.temp_lines_list.append(self.Nline)
self.c.bind_class(self.master1, "<Control-KeyPress-j>", self.del_temp_lines)
self.temp_collection = []
def del_temp_lines(self, event=None):
find = self.c.find_withtag('temp')
del_list = []
for i in find:
Num = self.c.gettags(i)[1]
del_list.append(Num)
if del_list:
self.delete(elements = del_list)
self.c.unbind_class(self.master1, "<Control-KeyPress-j>")
    #LINE
    def risLine(self):
        self.curent_class = line.Line(graf)
    #DIMENSION
    def risDim(self):
        self.curent_class = dimension.Dimension(graf)
    def risDimR(self):
        self.curent_class = dimension.Dimension_R(graf)
    #TEXT
    def risText(self, event = None):
        self.curent_class = text_line.Text(graf)
    #OBJECT DRAWING METHODS
    #CIRCLE
    def risCircle(self):
        self.curent_class = circle.Circle(graf)
    #ARC
    def risArc(self):
        self.curent_class = arc.Arc(graf)
    #LINE
def c_line(self, x1, y1, x2, y2, width = None, sloy = None, fill = None, stipple = None, factor_stip = None, tip = 'norm'):
self.curent_class = line.c_line(graf, x1, y1, x2, y2, width, sloy, fill, stipple, factor_stip, tip)
def copy_line(self, content):
self.Nlined += 1
self.Nline = 'L' + str(self.Nlined)
self.ALLOBJECT[self.Nline] = self.ALLOBJECT[content].copy()
return self.Nline
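    # Note that dict.copy() is shallow, so mutable values (such as the nested
    # 'id' dict) remain shared between the source object and the copy.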
    #LINEAR DIMENSION
def dim(self,x1,y1,x2,y2,x3,y3,text=None, sloy = None,
fill = None,
size = None,
ort = None,
text_change = 'unchange',
text_place = None,
s=None,
vv_s=None,
vr_s = None,
arrow_s = None,
type_arrow = None,
s_s = None,
w_text = None,
font = None):
self.curent_class = dimension.c_dim(self,x1,y1,x2,y2,x3,y3,text, sloy,
fill,
size,
ort,
text_change,
text_place,
s,
vv_s,
vr_s,
arrow_s,
type_arrow,
s_s,
w_text,
font)
    def get_snap_line(self, cont):#Finds the snap lines inside a complex object
lines = []
if cont[0] in ('d', 'r'):
for i in self.ALLOBJECT[cont]['id']:
tag = self.ALLOBJECT[cont]['id'][i]
if 'priv' in tag:
if 'dim_text' not in tag and 'dimr_text' not in tag:
lines.append(i)
else:
for i in self.ALLOBJECT[cont]['id']:
tag = self.ALLOBJECT[cont]['id'][i]
if 'priv' in tag:
lines.append(i)
return lines
    #RADIAL DIMENSION
def dimR(self,x1,y1,x2,y2, text=None, sloy = None,
fill = None,
size = None,
s=None,
vr_s = None,
arrow_s = None,
type_arrow = None,
s_s = None,
w_text = None,
font = None,
Rn = None):
self.curent_class = dimension.c_dimR(self,x1,y1,x2,y2, text, sloy,
fill,
size,
s,
vr_s,
arrow_s,
type_arrow,
s_s,
w_text,
font,
Rn)
    #CIRCLE
    def c_circle(self,x0,y0,xr = None, yr = None, width = None, sloy = None, fill = None, R = None):
        self.curent_class = circle.c_circle(graf, x0, y0, xr, yr, width, sloy, fill, R)
    #ARC
    def c_arc(self,x0,y0,xr1=None, yr1=None, xr2=None, yr2=None, width = None, sloy = None, fill = None, R = None, start = None, extent = None):
        self.curent_class = arc.c_arc(graf, x0,y0,xr1, yr1, xr2, yr2, width, sloy, fill, R, start, extent)
    #TEXT
    def c_text(self, x, y, text, anchor = 'sw', sloy = None, fill = None, angle = 0, size = None, s_s = None, w_text = None, font = None): #Text - rendering
        self.curent_class = text_line.c_text(graf, x, y, text, anchor, sloy, fill, angle, size, s_s, w_text, font)
    #Print the picture to PostScript
def print_postScript(self, event = None):
self.curent_class = print_ps.Print_PS(graf)
def enumerator_p(self):
self.enumerator +=1
if self.enumerator == self.auto_save_step:
self.enumerator = 0
self.fileCurSave()
def exportDXF(self):
self.s_dxf = True
self.fileSave()
self.s_dxf = False
def fileSave(self):
opt = options = {}
if self.s_dxf == False:
options['defaultextension'] = '.svg'
options['filetypes'] = [('SVG files', '.svg'),
('text files', '.txt'),
('all files', '.*')]
else:
options['defaultextension'] = '.dxf'
options['filetypes'] = [('text files', '.dxf'),
('all files', '.*')]
options['initialdir'] = self.appPath
options['initialfile'] = 'draft_1'
options['parent'] = self.master1
options['title'] = 'Save file'
f = tkFileDialog.asksaveasfile(mode='w', **opt)
if f:
if self.zoomOLD != 0:
if self.zoomOLD>0:
self.c.scale('obj',0,0,zoomm**self.zoomOLD,zoomm**self.zoomOLD)
else:
zoomOLDx=self.zoomOLD*(-1)
self.c.scale('obj',0,0,zoomp**zoomOLDx,zoomp**zoomOLDx)
xynach=self.c.coords(self.nachCoordy)
dx=-xynach[0]
dy=-xynach[1]
self.c.move('obj',dx+10,dy+10)
if self.s_dxf == False:
#save = save_file.saver(graf)
save = to_svg.Svger(graf)
for i in save.write_list:
#if i[:8] == 'self.c_t' or i[:8] == 'self.dim':
#f.write(codecs.BOM_UTF8)
f.writelines("%s\n" % i)#.encode("utf8"))
f.close()
self.saveFlag = True
self.changeFlag = False
self.current_file = f.name
self.master1.title(self.prog_version + ' - ' + self.current_file)
else:
save = to_dxf.Dxfer(graf)
for i in save.write_list:
f.writelines("%s\n" % i)
if self.zoomOLD != 0:
self.c.move('obj',-dx-10,-dy-10)
if self.zoomOLD>0:
self.c.scale('obj',0,0,zoomp**self.zoomOLD,zoomp**self.zoomOLD)
else:
zoomOLDx=self.zoomOLD*(-1)
self.c.scale('obj',0,0,zoomm**zoomOLDx,zoomm**zoomOLDx)
def fileCurSave(self):
if self.saveFlag == False:
self.fileSave()
else:
back_file = self.current_file[0:-4]+'.bak'
try:
copyfile(self.current_file, back_file)
except IOError:
                print ('Error writing backup file')
f = open(self.current_file, 'w')
if self.zoomOLD != 0:
if self.zoomOLD>0:
self.c.scale('obj',0,0,zoomm**self.zoomOLD,zoomm**self.zoomOLD)
else:
zoomOLDx=self.zoomOLD*(-1)
self.c.scale('obj',0,0,zoomp**zoomOLDx,zoomp**zoomOLDx)
xynach=self.c.coords(self.nachCoordy)
dx=-xynach[0]
dy=-xynach[1]
self.c.move('obj',dx+10,dy+10)
fileName, fileExt = os.path.splitext(f.name)
if fileExt == '.svg':
save = to_svg.Svger(graf)
for i in save.write_list:
f.writelines("%s\n" % i)
elif fileExt == '.txt':
save = save_file.saver(graf)
for i in save.write_list:
if i[:8] == 'self.c_t' or i[:8] == 'self.dim':
f.write(codecs.BOM_UTF8)
f.writelines("%s\n" % i.encode("utf8"))
'''
save = save_file.saver(graf)
for i in save.write_list:
if i[:8] == 'self.c_t' or i[:8] == 'self.dim':
f.write(codecs.BOM_UTF8)
f.writelines("%s\n" % i.encode("utf8"))
'''
f.close()
self.changeFlag = False
if self.zoomOLD != 0:
self.c.move('obj',-dx-10,-dy-10)
if self.zoomOLD>0:
self.c.scale('obj',0,0,zoomp**self.zoomOLD,zoomp**self.zoomOLD)
else:
zoomOLDx=self.zoomOLD*(-1)
self.c.scale('obj',0,0,zoomm**zoomOLDx,zoomm**zoomOLDx)
def new(self, event = None):
self.save_change()
self.saveFlag = False
self.changeFlag = False
self.current_file = 'New draft'
self.master1.title(self.prog_version + ' - ' + self.current_file)
self.delete(elements = self.ALLOBJECT.keys())
self.sbros_all()
def sbros_all(self):
self.collection = []
self.collectionBack = []
self.history_undo = []
def save_change(self):
if self.ALLOBJECT and self.changeFlag == True:
save_yes_no = tkMessageBox.askyesno('Save draft?', 'Save drawing?')
if save_yes_no == True:
self.fileCurSave()
def importDXF(self):
self.s_dxf = True
self.fileOpen()
self.s_dxf = False
def fileOpen(self, event = None):
self.save_change()
opt = options = {}
if self.s_dxf == False:
options['defaultextension'] = '.svg'
options['filetypes'] = [('SVG files', '.svg'),
('text files', '.txt'),
('all files', '.*')]
options['title'] = 'Open file'
else:
options['defaultextension'] = '.dxf'
options['filetypes'] = [('DXF files', '.dxf'),
('all files', '.*')]
options['title'] = 'Import from DXF'
options['initialdir'] = self.appPath
options['parent'] = self.master1
f = tkFileDialog.askopenfile(**opt)
if f:
if self.ALLOBJECT:
self.delete(elements = self.ALLOBJECT.keys())
if self.zoomOLD != 0:
if self.zoomOLD>0:
self.c.scale('obj',0,0,zoomm**self.zoomOLD,zoomm**self.zoomOLD)
else:
zoomOLDx=self.zoomOLD*(-1)
self.c.scale('obj',0,0,zoomp**zoomOLDx,zoomp**zoomOLDx)
zoomOLD = self.zoomOLD
self.zoomOLD = 0
xynach=self.c.coords(self.nachCoordy)
dx=-xynach[0]
dy=-xynach[1]
self.c.move('obj',dx+10,dy+10)
if self.s_dxf == False:
fileName, fileExt = os.path.splitext(f.name)
if fileExt == '.svg':
text = f.read()
SVG = from_svg.SVGopener(text, graf)
list_command = SVG.command_list
elif fileExt == '.txt':
list_command = f.readlines()
else:
text = f.read()
DXF = from_dxf.DXFopener(text)
list_command = DXF.command_list
if list_command:
errors = ''
for i in list_command:
try:
exec(i)
except:
errors += (i+'\n')
continue
if errors:
print ('Errors in opened file!')
print ('___________________________')
print (errors)
print ('___________________________')
f.close()
if self.s_dxf == False:
self.saveFlag = True
self.changeFlag = False
self.current_file = f.name
self.master1.title(self.prog_version + ' - ' + self.current_file)
else:
self.saveFlag = False
self.changeFlag = True
self.current_file = 'New draft'
self.master1.title(self.prog_version + ' - ' + self.current_file)
self.sbros_all()
self.c.move('obj',-dx-10,-dy-10)
self.zoomOLD = zoomOLD
if zoomOLD != 0:
if zoomOLD>0:
self.c.scale('obj',0,0,zoomp**zoomOLD,zoomp**zoomOLD)
else:
zoomOLDx=zoomOLD*(-1)
self.c.scale('obj',0,0,zoomm**zoomOLDx,zoomm**zoomOLDx)
def zoomP(self,x,y):
self.c.scale('obj',x,y,zoomp,zoomp)
def zoomM(self,x,y):
self.c.scale('obj',x,y,zoomm,zoomm)
def zoommerP(self):
x=self.frame1.winfo_width()/2.0
y=self.frame1.winfo_height()/2.0
self.zoomOLD += 1
if self.zoomOLD == -19:
self.c.itemconfig('t_LOD', state = 'normal')
self.c.itemconfig('snap_text', stipple = ('@'+os.path.join(self.appPath, 'res', '00.xbm')))
self.c.scale('obj',x,y,zoomp,zoomp)
def zoommerM(self):
x=self.frame1.winfo_width()/2.0
y=self.frame1.winfo_height()/2.0
self.zoomOLD -= 1
if self.zoomOLD ==-20:
self.c.itemconfig('t_LOD', state = 'hidden')
self.c.itemconfig('snap_text', stipple = '')
self.c.scale('obj',x,y,zoomm,zoomm)
def Mzoommer(self,event):
x = self.priv_coord[0]#event.x
y = self.priv_coord[1]#event.y
#if x<0:
#x = -x
#if y<0:
# y = -y
if event.delta > 0 or event.num == 4:
self.zoomOLD += 1
if self.zoomOLD == -19:
self.c.itemconfig('t_LOD', state = 'normal')
self.c.itemconfig('snap_text', stipple = ('@'+os.path.join(self.appPath, 'res', '00.xbm')))
self.c.scale('obj',x,y,zoomp,zoomp)
#self.zoomP(x,y)
else:
self.zoomOLD -= 1
if self.zoomOLD ==-20:
self.c.itemconfig('t_LOD', state = 'hidden')
self.c.itemconfig('snap_text', stipple = '')
self.c.scale('obj',x,y,zoomm,zoomm)
#self.zoomM(x,y)
def mouseMove(self,event):
global x1,y1
self.c.move('obj', event.x - x1, event.y - y1)
x1 = event.x
y1 = event.y
def OnMouseMove(self,event):
global x1,y1
x1 = event.x
y1 = event.y
root = Tk()
graf=Graphics()
graf.initial(root)
root.mainloop()
| VVS1864/SAMoCAD | src/core.py | Python | apache-2.0 | 89,765 |
#-----------------------------------------------------------------------
#
# Loader-specific SYS extensions
#
#-----------------------------------------------------------------------
from asm import *
# Peek into the ROM's symbol table
videoY = symbol('videoY')
sysArgs = symbol('sysArgs0')
vPC = symbol('vPC')
vLR = symbol('vLR')
#-----------------------------------------------------------------------
# Extension SYS_LoaderNextByteIn_32
#-----------------------------------------------------------------------
# sysArgs[0:1] Current address
# sysArgs[2] Checksum
# sysArgs[3] Wait value (videoY)
label('SYS_LoaderNextByteIn_32')
ld([videoY]) #15
xora([sysArgs+3]) #16
bne('.sysNbi#19') #17
ld([sysArgs+0],X) #18
ld([sysArgs+1],Y) #19
ld(IN) #20
st([Y,X]) #21
adda([sysArgs+2]) #22
st([sysArgs+2]) #23
ld([sysArgs+0]) #24
adda(1) #25
st([sysArgs+0]) #26
ld(hi('REENTER'),Y) #27
jmp(Y,'REENTER') #28
ld(-32/2) #29
# Restart the instruction in the next timeslice
label('.sysNbi#19')
ld([vPC]) #19
suba(2) #20
st([vPC]) #21
ld(-28/2) #22
ld(hi('REENTER'),Y) #23
jmp(Y,'REENTER') #24
nop() #25
#-----------------------------------------------------------------------
# Extension SYS_LoaderProcessInput_48
#-----------------------------------------------------------------------
# sysArgs[0:1] Source address
# sysArgs[2] Checksum
# sysArgs[4] Copy count
# sysArgs[5:6] Destination address
label('SYS_LoaderProcessInput_48')
ld([sysArgs+1],Y) #15
ld([sysArgs+2]) #16
bne('.sysPi#19') #17
ld([sysArgs+0]) #18
suba(65, X) #19 Point at first byte of buffer
ld([Y,X]) #20 Command byte
st([Y,Xpp]) #21 X++
xora(ord('L')) #22 This loader lumps everything under 'L'
bne('.sysPi#25') #23
ld([Y,X]); C('Valid command')#24 Length byte
st([Y,Xpp]) #25 X++
anda(63) #26 Bit 6:7 are garbage
st([sysArgs+4]) #27 Copy count
ld([Y,X]) #28 Low copy address
st([Y,Xpp]) #29 X++
st([sysArgs+5]) #30
ld([Y,X]) #31 High copy address
st([Y,Xpp]) #32 X++
st([sysArgs+6]) #33
ld([sysArgs+4]) #34
bne('.sysPi#37') #35
# Execute code (don't care about checksum anymore)
ld([sysArgs+5]); C('Execute')#36 Low run address
st([vLR]) #37 https://forum.gigatron.io/viewtopic.php?p=29#p29
suba(2) #38
st([vPC]) #39
ld([sysArgs+6]) #40 High run address
st([vPC+1]) #41
st([vLR+1]) #42
ld(hi('REENTER'),Y) #43
jmp(Y,'REENTER') #44
ld(-48/2) #45
# Invalid checksum
label('.sysPi#19')
wait(25-19); C('Invalid checksum')#19 Reset checksum
# Unknown command
label('.sysPi#25')
ld(ord('g')); C('Unknown command')#25 Reset checksum
st([sysArgs+2]) #26
ld(hi('REENTER'),Y) #27
jmp(Y,'REENTER') #28
ld(-32/2) #29
# Loading data
label('.sysPi#37')
ld([sysArgs+0]); C('Loading data')#37 Continue checksum
suba(1, X) #38 Point at last byte
ld([Y,X]) #39
st([sysArgs+2]) #40
ld(hi('REENTER'),Y) #41
jmp(Y,'REENTER') #42
ld(-46/2) #43
#-----------------------------------------------------------------------
# Extension SYS_LoaderPayloadCopy_34
#-----------------------------------------------------------------------
# sysArgs[0:1] Source address
# sysArgs[4] Copy count
# sysArgs[5:6] Destination address
label('SYS_LoaderPayloadCopy_34')
ld([sysArgs+4]) #15 Copy count
beq('.sysCc#18') #16
suba(1) #17
st([sysArgs+4]) #18
ld([sysArgs+0],X) #19 Current pointer
ld([sysArgs+1],Y) #20
ld([Y,X]) #21
ld([sysArgs+5],X) #22 Target pointer
ld([sysArgs+6],Y) #23
st([Y,X]) #24
ld([sysArgs+5]) #25 Increment target
adda(1) #26
st([sysArgs+5]) #27
bra('.sysCc#30') #28
label('.sysCc#18')
ld(hi('REENTER'),Y) #18,29
wait(30-19) #19
label('.sysCc#30')
jmp(Y,'REENTER') #30
ld(-34/2) #31
#-----------------------------------------------------------------------
#
#-----------------------------------------------------------------------
| kervinck/gigatron-rom | Apps/Loader/SYS_Loader_v2.py | Python | bsd-2-clause | 5,211 |
# a script to rename files by changing their names in a TC checksum list
#Ange Albertini 2013
import sys
import hashlib
import glob
fn = sys.argv[1]
with open(fn, "r") as s:
r = s.readlines()
sums = {}
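# map sha1 -> filename; a hash seen a second time is dropped as ambiguous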
for s in r:
s = s.strip()
sha1, file = s[:40], s[40 + 2:]
file = file[file.rfind("\\") + 1:]
if sha1 not in sums:
sums[sha1] = file
else:
del(sums[sha1])
unknowns = []
for f in sorted(glob.glob("*")):
try:
with open(f, "rb") as file:
content = file.read()
    except IOError:
continue
sum = hashlib.sha1(content).hexdigest()
if sum in sums and f != sums[sum]:
print 'ren %s %s' % (('"' + f + '"').ljust(30), ('"' + sums[sum] + '"').ljust(30))
elif sum not in sums:
unknowns += [f]
if unknowns:
print "unknowns or duplicates: " + " ".join(unknowns)
| angea/corkami | misc/python/ren_sum.py | Python | bsd-2-clause | 891 |
import argparse
from random import *
import math
from geo2d.geometry import *
import itertools
from tkinter import *
import time
from intervalset import AngularIntervalSet,odd
import sys,traceback
from collections import namedtuple
def random_color():
return "#%02x%02x%02x" % (randrange(0,255),randrange(0,255),randrange(0,255))
def as_color(r,g,b):
return "#%02x%02x%02x" % (255*r,255*g,255*b)
def gray(x):
return as_color(x,x,x)
def Heading(dir,rho=None):
return Vector(rho or 1.0,dir,coordinates="polar")
def overlap(poly1,poly2,c1=None,c2=None,r1=None,r2=None):
b1 = poly1.bounding_box
b2 = poly2.bounding_box
c1 = c1 or poly1.centroid
    c2 = c2 or poly2.centroid
r1 = r1 or poly1.diameter
r2 = r2 or poly2.diameter
d = r1+r2
c = Point((r1*c1.x+r2*c2.x)/d,(r1*c1.y+r2*c2.y)/d)
    # This isn't the right value for o -- the real overlap is lens shaped.
o = d - c1.distance_to(c2)
if o < 0 or b1.left > b2.right or b1.right < b2.left or b1.top < b2.bottom or b1.bottom > b2.top:
return False
for p1 in poly1.vertices:
if c.distance_to(p1) < o and poly2.has(p1):
return True
for p2 in poly2.vertices:
if c.distance_to(p2) < o and poly1.has(p2):
return True
e2_near_edges = [e for e in poly2.edges if c.distance_to(e) < o and e.length > 0]
for e1 in poly1.edges:
if c.distance_to(e1) < o and e1.length > 0:
for e2 in e2_near_edges:
if e1.intersection(e2):
return True
return False
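# Minimal usage sketch for overlap(); the coordinates are hypothetical and
# assume geo2d.geometry.Polygon accepts a list of (x, y) tuples, as the
# outline() methods below produce.
def _overlap_example():
    a = Polygon([(0, 0), (2, 0), (2, 2), (0, 2)])
    b = Polygon([(1, 1), (3, 1), (3, 3), (1, 3)])  # overlaps a corner of a
    c = Polygon([(10, 10), (12, 10), (12, 12), (10, 12)])  # disjoint from a
    return overlap(a, b), overlap(a, c)  # expected: (True, False)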
class DisplayObject:
def __init__(self,world,loc):
self.world = world
self.location = loc
def on_tick(self):
pass
def draw(self, canvas,s):
pass
def remove_image(self,canvas):
for part in self.tk_ids.values(): canvas.delete(part)
self.tk_ids = {}
def place_image_part(self,part,canvas,s,*coords):
canvas.coords(self.tk_ids[part],*[s*coord for coord in coords])
def displacement_to(self,other):
loc = other.location if hasattr(other, "location") else other
return self.world.wrap(Vector(self.location,loc))
def distance_to(self,other):
return self.displacement_to(other).rho
def stipple(r):
if r < 12: return 'gray12'
elif r < 25: return 'gray25'
elif r < 50: return 'gray50'
elif r < 75: return 'gray75'
else: return 'gray75' #None
class Sound(DisplayObject):
def __init__(self,world,loc,volume,text):
DisplayObject.__init__(self,world,loc)
self.volume = volume
self.text = text
self.tk_ids = {}
self.age = 1
self.faded = False
def on_tick(self):
self.age += 1
def draw(self, canvas, s):
self.remove_image(canvas)
if self.age < self.volume:
loc = self.location
self.tk_ids = {
'text': canvas.create_text(
s*loc.x, s*loc.y-20,
text=self.text,
font=('Helvetica',int(s*((self.volume+self.age)/10)**2)),
fill=gray(max(self.age/self.volume-0.2,0)),
stipple=stipple(100-(self.age*100)/self.volume)
)
}
else:
self.faded = True
class PhysicalObject(DisplayObject):
collision_cost = 10
def __init__(self,world,loc):
DisplayObject.__init__(self,world,loc)
self.tk_ids = {}
self.color = {"fill": "black"}
        self.mass = 10000.0 # Anchored to the ground
self.heading = Vector(0,0) # Going nowhere
self.hardness = 0.01
self.dead = False
self.goal = False
self.anchored = False
self.floor_mat = False
def dump_status(self):
print(self.location)
def on_collision(self,dir,other):
pass
def on_damage(self,amount):
pass
def radius(self):
return 1
def core_radius(self):
return self.radius()
def outline(self):
r = self.radius()
loc = self.location
sides = 8
q = 2*math.pi/sides
return [(loc.x+r*math.cos(a*q),loc.y+r*math.sin(a*q)) for a in range(0,sides)]
def die(self,*args):
self.dead = True
def draw(self, canvas,s):
r = self.radius()
if r > 0 and not self.dead:
if not self.tk_ids:
self.tk_ids = { 'image': canvas.create_oval(50, 50, s*2*r, s*2*r, **self.color) }
canvas.tag_lower(self.tk_ids['image'])
loc = self.location
self.place_image_part('image',canvas,s,loc.x-r, loc.y-r,loc.x+r, loc.y+r)
else:
self.remove_image(canvas)
class Block(PhysicalObject):
def __init__(self,world,loc,l=10,w=1,heading=None,density=1):
PhysicalObject.__init__(self,world,loc)
self.heading = heading or Heading(uniform(0.0,2*math.pi))*0.0001
self.length = l
self.width = w
self.mass = l*w*density
self.color = {"fill":"brown", "stipple":'gray75'}
self.hardness = 1.0
self.anchored = True
def outline(self):
loc = self.location
h = self.heading.normalized
l = self.length
w = self.width
lx,ly = l*h.x, l*h.y
wx,wy = w*h.y,-w*h.x
return [
(loc.x+lx+wx,loc.y+ly+wy),
(loc.x+lx-wx,loc.y+ly-wy),
(loc.x-lx-wx,loc.y-ly-wy),
(loc.x-lx+wx,loc.y-ly+wy),
]
#def on_tick(self):
# self.heading = Heading(self.heading.phi+0.01)*0.0001
def core_radius(self):
return min(self.length,self.width)
def create_image(self,canvas):
self.tk_ids = { 'body': canvas.create_polygon(1,1,**self.color) }
def place_image(self,canvas,s):
self.place_image_part('body', canvas,s,*[coord for p in self.outline() for coord in p])
def draw(self, canvas,s):
if self.dead:
self.remove_image(canvas)
else:
if not self.tk_ids: self.create_image(canvas)
self.place_image(canvas,s)
def radius(self):
return math.sqrt(self.length**2+self.width**2)
class Secretion(DisplayObject):
trails = []
undrawn = []
dead = set()
resized = set()
def __init__(self,world,loc):
DisplayObject.__init__(self,world,loc)
self.size = 2
self.tk_id = None
Secretion.undrawn.append(self)
def on_tick():
for t in range(0,100):
i = randrange(0,1000)
if i < len(Secretion.trails):
if Secretion.trails[i].size < 15:
Secretion.resized.add(Secretion.trails[i])
else:
Secretion.dead.add(Secretion.trails.pop(i))
while len(Secretion.trails) > 1000:
Secretion.dead.add(Secretion.trails.pop(randrange(0,len(Secretion.trails))))
def on_draw(canvas,s):
for t in Secretion.undrawn:
loc = t.location
t.tk_id = canvas.create_oval(loc.x*s-1, loc.y*s-1, loc.x*s+1, loc.y*s+1, outline="blue")
Secretion.trails.append(t)
Secretion.undrawn = []
for t in Secretion.resized:
x1,y1,x2,y2 = canvas.coords(t.tk_id)
canvas.coords(t.tk_id,x1-1,y1-1,x2+1,y2+1)
canvas.itemconfig(t.tk_id,outlinestipple=stipple(100-3*(x2-x1)))
for t in Secretion.dead: canvas.delete(t.tk_id)
Secretion.dead.clear()
Secretion.resized.clear()
class Critter(PhysicalObject):
def __init__(self,world,brain_class,name):
PhysicalObject.__init__(self,world,None)
if isinstance(name,int):
name = brain_class.owner + brain_class.code + str(name)
self.name = name
self.heading = Heading(uniform(0.0,2*math.pi))
profile = [uniform(0.5,0.8) for i in range(0,10)]
self.shape = [1.0,0.8]+profile+list(reversed(profile))+[0.8]
self.mass = 25
self.color = {"fill":random_color(), "smooth":1, "stipple":'gray50'}
self.brain = brain_class()
self.last_spoke = -10
self.sense_data = None
self.whats_under = set()
self.age = 0
self.hardness = 0.5
self.secreting = None
self.finished = 0
self.sense_depiction_ids = []
world.spawn(self)
def dump_status(self):
print(self.name)
self.brain.dump_status()
print(self.location)
metabolic_cost = 0.01
movement_cost = 0.1
acceleration_cost = 40
def on_tick(self):
if self.dead: return
if not self.undead(): self.age += 1
for x in list(self.whats_under):
if x.radius() <= 0 or self.distance_to(x) > self.radius() + x.radius():
self.whats_under.remove(x)
self.sense_data = self.senses()
self.mass -= self.metabolic_cost + self.movement_cost*self.heading.rho*self.heading.rho
if self.mass <= 0:
self.die(sound="..nnn...nnn..nnn...",volume=6)
else:
self.act(self.brain_on_tick() or "Pass")
self.location.translate(self.heading.x,self.heading.y)
self.location = self.world.wrap(self.location)
def on_damage(self,amount):
if amount > 0.1:
self.say("Ooof!")
self.mass -= amount
if self.mass <= 0:
self.die(volume=0)
def on_collision(self,dir,other):
if other.goal:
self.die("Yes!")
self.finished += self.age
self.whats_under.add(other)
self.act(self.brain_on_collision(dir,other) or ("Eat" if isinstance(other,Food) else "Pass"))
def teleport_to(self,world,loc):
self.world = world
self.location = loc
def die(self,sound="Aaaaaaaaa...!",volume=20):
self.mass = 0
if not self.dead:
self.say(sound,volume=volume)
PhysicalObject.die(self)
def arise(self):
if self.dead:
self.dead = None
self.mass = 15
self.metabolic_cost = 0.0
self.movement_cost = 0.0
self.color["outline"] = "green"
self.color["width"] = 2
self.brain = ZombieBrain()
def undead(self):
return (self.dead is not True) and (self.dead is not False)
def say(self,msg,volume=10):
if not self.dead:
if self.world.clock - self.last_spoke > 10:
self.world.sound(self.location,volume,msg)
self.last_spoke = self.world.clock
max_speed = 2.5
def act(self,cmd):
if self.dead: return
if self.secreting and randrange(0,2) == 0:
Secretion(self.world,self.location)
sharpest_turn = 0.5
if not cmd is None:
word = cmd.split()
if word[0] == "Stop":
self.heading = self.heading.normalized*(1/10000)
elif word[0] == "Go":
self.heading = self.heading.normalized
elif word[0] == "Turn":
self.heading = Heading(self.heading.phi+sorted([-sharpest_turn,float(word[1]),sharpest_turn])[1],rho=self.heading.rho)
elif word[0] == "Accelerate":
initial_speed = self.heading.rho
self.heading *= float(word[1])
if self.heading.rho > self.max_speed:
self.heading *= self.max_speed/self.heading.rho
#if self.heading.rho != initial_speed:
# print("%s lost %5.3f accelerating %5.3f -> %5.3f" %
# (self.name,self.acceleration_cost*(self.heading.rho-initial_speed)**2,initial_speed,self.heading.rho))
self.mass -= self.acceleration_cost*(self.heading.rho-initial_speed)**2
elif word[0] == "Attack":
pass
elif word[0] == "Eat":
for f in self.whats_under:
if isinstance(f,Food) and f.value > 0:
self.say("Yum")
f.value -= 0.1
self.mass += 0.1
break
elif word[0] == "Pass":
pass
elif word[0] == "Secrete":
if word[1] == "Nothing" or word[1] == "0":
self.secreting = None
else:
self.secreting = int(word[1])
elif word[0] == "Say":
self.say(cmd[4:])
else:
print("Unknown command: {}".format(cmd))
def radius(self):
return math.sqrt(self.mass) if self.mass > 0 else 0
def core_radius(self):
return self.radius()*min(self.shape)
def relative_heading(self,x):
        return (x-self.heading.phi+math.pi) % (2*math.pi) - math.pi
def relative_heading_to(self,x):
return self.relative_heading(self.displacement_to(x).phi)
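    # e.g. with heading.phi == 0, relative_heading(3*math.pi/2) == -math.pi/2;
    # results are wrapped into [-pi, pi) relative to the current facing.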
Sight = namedtuple("Sight", "color distance direction width change")
Sound = namedtuple("Sound", "text direction volume age")
Smell = namedtuple("Smell", "smell strength change")
State = namedtuple("State", "moving speed health age")
def senses(self):
return {
'sight': self.sight(), # set of tuples: (color,distance,direction,width,change)
'smell': set(), # set of tuples: (smell,strength,change)
'hearing': set([Critter.Sound(s.text,self.relative_heading_to(s),s.volume/(1+self.distance_to(s)),s.age) for s in self.world.sounds]),
'taste': set([type(x) for x in self.whats_under]),
'body': Critter.State(self.heading.rho>0.1,self.heading.rho,self.mass,self.age),
'gps': self.location,
'compass': self.heading.phi,
}
def sight(self):
objects = [((self.world.width+self.world.height)/8,0,AngularIntervalSet(-1.0,1.0),self.world)]
forward = self.heading.phi
for o in self.world.neighbors_of(self):
if o != self:
d = self.displacement_to(o)-self.eye_offset()
d -= d*(o.radius()/d.rho)
                # We can only see things above our horizon, which we approximate by saying they have
# to be within a quarter of the way around in either direction.
if (d.x/self.world.width)**2 + (d.y/self.world.height)**2 < (1/4)**2:
# We can only see things in front of us
a = (d.phi-forward+math.pi) % (2*math.pi) - math.pi
delta_a = math.atan2(o.radius(),d.rho)
if abs(a)-abs(delta_a) < 1:
objects.append((d.rho,uniform(0.0,1.0),AngularIntervalSet(a-delta_a,a+delta_a),o))
# We can only see things within a two radian field of view
view_mask = AngularIntervalSet(-1,+1)
sights = set()
for dist,rand,image,obj in sorted(objects):
# we see all of the object not blocked by something closer
visable_part = view_mask.intersection(image)
# the object blocks things that are further
view_mask = view_mask.intersection(image.inverse())
for segment in visable_part.ranges():
color = obj.color['outline'] if 'outline' in obj.color else obj.color['fill']
sights.add(Critter.Sight(color,dist,(segment[0]+segment[1])/2,segment[1]-segment[0],0))
# stop when our field of view is full
if view_mask.trivial(): break
# TODO: figure out how to calculate change
return sights
def outline(self):
r = self.radius()
loc = self.location
phi = self.heading.phi
q = 2*math.pi/len(self.shape)
return [(loc.x+r*d*math.cos(a*q+phi),loc.y+r*d*math.sin(a*q+phi)) for a, d in enumerate(self.shape)]
def eye_offset(self):
r = self.radius()
phi = self.heading.phi
d = self.shape[0]*0.7
return Vector(r*d*math.cos(phi),r*d*math.sin(phi))
def create_image(self,canvas):
self.tk_ids = {
'body': canvas.create_polygon(1,1,**self.color),
'text': canvas.create_text(50,50, text=self.name),
'eye': canvas.create_oval(50, 50, 1, 1, fill = "white"),
'pupil': canvas.create_oval(50, 50, 1, 1, fill = "black", outline="blue"),
}
def place_image(self,canvas,s):
outline = self.outline()
loc = self.location
px = 1/s
eye_off = self.eye_offset()
x,y = loc.x+eye_off.x,loc.y+eye_off.y
pp = self.displacement_to(self.world.pits[0] if self.world.pits else self.world.random_location()).normalized
self.place_image_part('text', canvas,s,loc.x,loc.y)
self.place_image_part('body', canvas,s,*[coord for p in outline for coord in p])
self.place_image_part('eye', canvas,s, x-1, y-1, x+1, y+1)
self.place_image_part('pupil',canvas,s, x+pp.x/2-px, y+pp.y/2-px, x+pp.x/2+px, y+pp.y/2+px)
def draw(self, canvas,s):
if self.dead:
self.remove_image(canvas)
else:
if not self.tk_ids: self.create_image(canvas)
self.place_image(canvas,s)
self.draw_senses(canvas,s)
def draw_senses(self,canvas,s):
for part in self.sense_depiction_ids: canvas.delete(part)
outline = self.outline()
x,y = outline[0]
sd = self.sight()
for sight in sd: #self.sense_data['sight']:
d = sight.distance
h = sight.direction + self.heading.phi
self.sense_depiction_ids.append(
canvas.create_line(x*s,y*s, s*(x+d*math.cos(h)),s*(y+d*math.sin(h)), fill=sight.color,stipple=stipple(200/(d+1)))
)
def brain_on_tick(self):
try:
return self.brain.on_tick(self.sense_data)
except Exception as e:
traceback.print_tb(sys.exc_info()[-1], limit=3)
self.die()
def brain_on_collision(self,dir,other):
try:
return self.brain.on_collision(dir,other,self.sense_data)
except Exception as e:
traceback.print_tb(sys.exc_info()[-1], limit=3)
self.die()
class CritterBrain:
code = ''
owner = None
def dump_status(self):
pass
def on_collision(self,dir,other,senses):
pass
def on_attack(self,dir,attacker,senses):
pass
def on_tick(self,senses):
pass
class ZombieBrain(CritterBrain):
def on_tick(self,senses):
non_green = [c for c in senses['sight'] if c.color != 'green']
closest = min(non_green, key=lambda s: s.distance) if non_green else None
target = closest.direction if closest else uniform(-0.2,0.1) if senses['compass'] > math.pi else uniform(-0.1,0.2)
if randrange(0,50) == 0:
return "Say Brains...."
elif randrange(0,50) == 0:
return "Say Urrrr...."
elif senses['body'].speed > 0.2:
return "Accelerate {}".format(0.1/senses['body'].speed)
else:
return "Turn {}".format(target)
class Food(PhysicalObject):
def __init__(self,world,loc,value):
PhysicalObject.__init__(self,world,loc)
self.value = value
self.color = {"fill": "dark green", "outline": "green", "width":3}
self.anchored = True
self.floor_mat = True
def on_tick(self):
# Could spoil, spread, or...?
pass
def on_collision(self,dir,other):
pass
def radius(self):
if self.value < 0: self.value = 0
return math.sqrt(self.value)
class Pit(PhysicalObject):
def __init__(self,world,loc):
PhysicalObject.__init__(self,world,loc)
self.r = 10
self.color = {"fill": "black", "outline": "dark red"}
self.anchored = True
def on_tick(self):
pass
def on_collision(self,dir,other):
other.location = self.location
other.die()
def radius(self):
return self.r
class GoldStar(Block):
def __init__(self,world,loc):
PhysicalObject.__init__(self,world,loc)
self.r = 5
self.color = {"fill": "gold"}
self.anchored = True
def on_tick(self):
pass
def on_collision(self,dir,other):
other.finished -= 50
self.location = self.world.random_location()
def radius(self):
return self.r
def core_radius(self):
return self.radius()/2
def outline(self):
r = [self.radius(),self.core_radius()]
loc = self.location
sides = 10
q = 2*math.pi/sides
return [(loc.x+r[a%2]*math.cos(a*q),loc.y+r[a%2]*math.sin(a*q)) for a in range(0,sides)]
class World:
height = 100
width = 200
neighborhood_refresh = 4
neighborhood_radius_x = width/6 +Critter.max_speed*neighborhood_refresh
neighborhood_radius_y = height/6+Critter.max_speed*neighborhood_refresh
color = {"fill":"#000"}
def __init__(self,tick_time=0.1,tick_limit=-1,food=50,pits=0,stars=0,warn=False,blocks=0,zombies=False,stop_count=None):
self.critters = []
self.starting_critters = []
self.world_view = WorldView(self,5)
self.food = [Food(self,self.random_location(),randrange(2,16)) for i in range(0,food)]
self.pits = [Pit(self,self.random_location()) for i in range(0,pits)]
self.stars = [GoldStar(self,self.random_location()) for i in range(0,stars)]
self.blocks = [Block(self,self.random_location(),randrange(1,10),randrange(1,10)) for i in range(0,blocks)]
#self.finish_line()
self.maze(6,12)
self.sounds = []
self.clock = 0
self.neighbors = {}
self.tick_time = tick_time
self.tick_limit = tick_limit
self.warn = warn
self.zombies_allowed = zombies
self.zombies = []
self.stop_count = stop_count
def finish_line(self):
fl_segments = 10
fl_height = self.height / fl_segments
for i in range(0,fl_segments):
self.blocks.append(Block(self,Point(self.width-15,(i+0.5)*fl_height),1,fl_height/2-0.1,Heading(0),10000))
self.blocks[-1].goal = True
def maze(self,h,w):
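        # Cells sit at even (x, y) lattice points and walls where exactly one
        # coordinate is odd; starting from (0, 0), repeatedly knock out the
        # wall between a connected cell and an unconnected neighbor two steps
        # away (a randomized spanning-tree maze).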
walls = set([(x,y) for x in range(0,2*w) for y in range(0,2*h) if odd(x) != odd(y)])
cells = set([(0,0)])
while len(cells) < h*w:
x,y = (2*randrange(0,w),2*randrange(0,h))
dir = randrange(0,2)
dist = 4*randrange(0,2)-2
#x0,y0 = ((x+dir*dist) % (2*w),(y+(1-dir)*dist) % (2*h))
x0,y0 = (x+dir*dist,y+(1-dir)*dist)
if (0 <= x0 < 2*w) and (0 <= y0 < 2*h) and ((x0,y0) in cells) != ((x,y) in cells):
cells.add((x0,y0) if (x,y) in cells else (x,y))
#print((x,y),(x0,y0))
if dist < 0:
walls.remove((x0-(dir*dist)//2,y0-((1-dir)*dist)//2))
else:
walls.remove((x +(dir*dist)//2,y +((1-dir)*dist)//2))
cell_w = self.width/(2*w)
cell_h = self.height/(2*h)
for x,y in walls:
for i in [-3,-1,1,3]:
if odd(x):
p = Point((x+0.8)*cell_w,(y+0.8+i/4)*cell_h)
self.blocks.append(Block(self,p,2,cell_h/4,Heading(0),1000))
else:
p = Point((x+0.8+i/4)*cell_w,(y+0.8)*cell_h)
self.blocks.append(Block(self,p,cell_w/4,2,Heading(0),1000))
def random_location(self):
return Point(randrange(0,self.width),randrange(0,self.height))
def spawn(self,critter):
self.critters.append(critter)
self.starting_critters.append(critter)
critter.teleport_to(self,self.random_location())
def dump_status(self):
for c in self.critters:
c.dump_status()
def physical_objects(self):
return self.critters + self.food + self.pits + self.stars + self.blocks
def display_objects(self):
return self.physical_objects() + self.sounds
def sound(self,loc,volume,text):
self.sounds.append(Sound(self,loc,volume,text))
def find_neighbors(self,c):
self.neighbors[c] = set([self.blocks[-1]])
others = set(self.physical_objects())
others.remove(c)
for o in others:
disp = c.displacement_to(o)
if (disp.x/self.neighborhood_radius_x)**2 + (disp.y/self.neighborhood_radius_y)**2 < 1:
self.neighbors[c].add(o)
def neighbors_of(self,c):
if not c in self.neighbors: self.find_neighbors(c)
return self.neighbors[c]
def run(self):
stop_count = self.stop_count or len(self.starting_critters) and math.log(math.e*len(self.starting_critters))
while self.world_view.window_open and self.clock != self.tick_limit and len([c for c in self.critters if c.dead == False]) >= stop_count:
loop_start = time.time()
self.clock += 1
self.lighting = sorted([0,2*math.cos(self.clock/1000),1])[1]
self.sounds = [s for s in self.sounds if not s.faded]
self.food = [f for f in self.food if f.value > 0]
if self.zombies_allowed:
self.zombies += [c for c in self.critters if c.dead]
self.critters = [c for c in self.critters if not c.dead]
if self.lighting == 0:
for c in self.zombies:
c.arise()
self.critters.append(c)
self.zombies = []
Secretion.on_tick()
shuffle(self.critters)
if self.clock % self.neighborhood_refresh == 0:
self.neighbors = {}
for c in self.display_objects():
c.on_tick()
changes = []
checked = {}
for c in self.critters+self.blocks:
if not c.anchored:
checked[c] = True
c_outline = c.outline()
c_polygon = Polygon(c_outline)
core_radius = c.core_radius()
for o in self.neighbors_of(c):
if not checked.get(o,False):
d = c.distance_to(o)
if d >= c.radius() + o.radius():
pass # they missed
elif d < core_radius + o.core_radius():
# solid hit
self.process_collision(c,o,changes)
elif overlap(c_polygon,Polygon(o.outline()),c1=c.location,c2=o.location,r1=c.radius(),r2=o.radius()):
# glancing blow
self.process_collision(c,o,changes)
for o,d_phi,d_loc in changes:
o.heading = Heading(o.heading.phi+d_phi,rho=o.heading.rho/2)
o.location = self.wrap(Point(Vector(o.location)+d_loc))
self.world_view.on_tick()
excess_time = self.tick_time-(time.time()-loop_start)
if excess_time > 0:
time.sleep(excess_time)
elif self.warn:
print("Tick over time by ",-excess_time," seconds!")
def process_collision(self,a,b,changes):
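        # d is the unit normal from b toward a and v the relative velocity;
        # damage scales with the squared closing speed along that normal,
        # apportioned by relative mass and hardness below.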
d = b.displacement_to(a).normalized
v = a.heading - b.heading
impact = d.dot(v)**2
for x,other,s in [[a,b,+1],[b,a,-1]]:
if not other.floor_mat:
relative_mass = 1.0 - (0.0 if other.anchored else x.mass/(a.mass+b.mass))
if not x.anchored:
changes.append([x,s*((d-v*0.1*relative_mass).phi-d.phi),d*(1+abs(v.dot(d)))*s*relative_mass])
x.on_damage(impact*(x.collision_cost/100)*relative_mass*other.hardness/x.hardness)
a.on_collision(-d,b)
b.on_collision( d,a)
def wrap(self,p):
h = self.height
w = self.width
if isinstance(p,Point): return Point(p.x % w,p.y % h)
if isinstance(p,Vector): return Vector((p.x+w/2) % w - w/2,(p.y+h/2) % h - h/2)
return p
def print_stats(self):
print("Food remaining: ",sum(f.value for f in self.food))
print("Brains available: ",len(Brains.available))
print("Critters at start: ",len(self.starting_critters))
print("Critters remaining: ",len(self.critters))
for c in sorted(self.starting_critters,key=lambda c: (-(c.finished or self.clock),c.age,c.mass),reverse=True):
status = ("finished at %5.2f" % (c.finished*self.tick_time)) if c.finished else {False:"alive",True:"%5.2f" % (c.age*self.tick_time),None:"Undead"}[c.dead]
print(" %5s %20s %5.1f" % (c.name,status,c.mass))
class WorldView:
def __init__(self,world,scale):
self.world = world
self.scale = scale
self.tk = Tk()
self.tk.title("Critters")
self.tk.resizable(0, 0)
self.tk.wm_attributes("-topmost", 1)
self.canvas_height = scale*world.height
self.canvas_width = scale*world.width
self.canvas = Canvas(self.tk, width=self.canvas_width, height=self.canvas_height, highlightthickness=0)
self.canvas.pack()
self.tk.update()
self.window_open = True
def they_hit_close():
self.window_open = False
self.tk.protocol("WM_DELETE_WINDOW",they_hit_close)
def menu(evt):
tk = Tk()
btnq = Button(tk, text="Quit", command=tk.destroy)
btnq.pack({"side": "bottom"})
tk.title('Menu')
tk.resizable(0, 0)
tk.wm_attributes("-topmost", 1)
tk.update()
self.canvas.bind_all('<KeyPress-m>', menu)
def on_tick(self):
if self.window_open:
self.canvas.config(background=gray(self.world.lighting))
Secretion.on_draw(self.canvas,self.scale)
for sprite in self.world.display_objects():
sprite.draw(self.canvas,self.scale)
self.tk.update_idletasks()
self.tk.update()
class Users:
registered = []
current = None
initial = None
def register(name):
Users.registered.append(name)
Users.current = name
Users.initial = name[0:1]
def initial(ch):
Users.initial = ch
class Brains:
registered = {}
available = []
codes = None
def register(brain_class):
u = Users.current
if (not Brains.codes) or (brain_class.code == Brains.codes):
if not u in Brains.registered.keys():
Brains.registered[u] = []
Brains.registered[u].append(brain_class)
Brains.available.append(brain_class)
brain_class.owner = Users.initial
parser = argparse.ArgumentParser()
parser.add_argument('-t', default=0.1, type=float)
parser.add_argument('-n', default= -1, type=int)
parser.add_argument('-c', default= 10, type=int)
parser.add_argument('-f', default=100, type=int)
parser.add_argument('-p', default= 0, type=int)
parser.add_argument('-s', default= 0, type=int)
parser.add_argument('-b', default= 0, type=int)
parser.add_argument('-w', default=False, action='store_true')
parser.add_argument('-z', default=False, action='store_true')
parser.add_argument('--metabolic_cost', default = 0.01, type=float)
parser.add_argument('--movement_cost', default = 0.1, type=float)
parser.add_argument('--acceleration_cost', default = 40, type=float)
parser.add_argument('--collision_cost', default = 10, type=float)
parser.add_argument('--stop_count', default = None, type=int)
parser.add_argument('--codes')
parser.add_argument('files', nargs=argparse.REMAINDER)
cmd = parser.parse_args()
Critter.metabolic_cost = cmd.metabolic_cost
Critter.movement_cost = cmd.movement_cost
Critter.acceleration_cost = cmd.acceleration_cost
PhysicalObject.collision_cost = cmd.collision_cost
Brains.codes = cmd.codes
import atexit
import glob,re
for file in cmd.files or glob.glob("*_brains.py"):
match = re.search('^(.+)_brains.py$', file)
if match:
Users.register(match.group(1))
try:
exec(compile(open(file, "r").read(), file, 'exec'))
except Exception as e:
traceback.print_exception(*sys.exc_info(),limit=1)
if not Brains.available:
print("No brains available!")
exit()
w = World(
tick_time = cmd.t,
tick_limit = cmd.n,
food = cmd.f,
pits = cmd.p,
stars = cmd.s,
blocks = cmd.b,
warn = cmd.w,
zombies = cmd.z,
stop_count = cmd.stop_count
)
@atexit.register
def show_stats():
global w
w.print_stats()
for i in range(1,cmd.c+1):
c = Critter(w,Brains.available[i % len(Brains.available)],i)
#For race
#c.heading = Heading(0)
#c.location = Point(10,(i+0.5)*w.height/(cmd.c+1))
#For maze
c.location = Point((200/12)*(randrange(0,12)+0.25),(100/6)*(randrange(0,6)+0.25))
# [Critter(w,Brains.available[i % len(Brains.available)],i) for i in range(1,cmd.c+1)]
# [Critter(w,choice(Brains.available),i) for i in range(1,cmd.c+1)]
try:
w.run()
except KeyboardInterrupt:
pass
| FGCSchool-Math-Club/fgcs-math-club-2014 | critters.py | Python | bsd-2-clause | 33,180 |
import libcontext, bee
from bee.segments import *
from libcontext.socketclasses import *
from libcontext.pluginclasses import *
class become(bee.worker):
"""The become trigger fires every tick if its input has just become True"""
inp = antenna("pull", "bool")
b_inp = buffer("pull", "bool")
connect(inp, b_inp)
trig = output("push", "trigger")
trigfunc = triggerfunc(trig)
pullfunc = triggerfunc(b_inp)
# Name the inputs and outputs
guiparams = {
"inp": {"name": "Input"},
"trig": {"name": "Trigger"},
}
def update_value(self):
self.pullfunc()
if not self.previous_state and self.b_inp:
self.trigfunc()
self.previous_state = self.b_inp
def enable(self):
# Add a high-priority deactivate() listener on every tick
self.add_listener("trigger", self.update_value, "tick", priority=9)
def set_add_listener(self, add_listener):
self.add_listener = add_listener
def place(self):
self.previous_state = False
libcontext.socket(("evin", "add_listener"), socket_single_required(self.set_add_listener))
#Make sure we are enabled at startup
libcontext.plugin(("bee", "init"), plugin_single_required(self.enable)) | agoose77/hivesystem | sparta/triggers/become.py | Python | bsd-2-clause | 1,276 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('lexicon', '0083_cognateclass_loanEventTimeDepth'),
]
operations = [
migrations.RemoveField(
model_name='cognateclasslist',
name='cognateclasses',
),
migrations.AlterUniqueTogether(
name='cognateclasslistorder',
unique_together=set([]),
),
migrations.RemoveField(
model_name='cognateclasslistorder',
name='cognateclass',
),
migrations.RemoveField(
model_name='cognateclasslistorder',
name='cognateclass_list',
),
migrations.DeleteModel(
name='CognateClassList',
),
migrations.DeleteModel(
name='CognateClassListOrder',
),
]
| lingdb/CoBL-public | ielex/lexicon/migrations/0084_auto_20160713_1522.py | Python | bsd-2-clause | 922 |
from __future__ import absolute_import, division, print_function
import tensorflow as tf
from tensorflow import convert_to_tensor as to_T
from util.cnn import fc_layer as fc, conv_relu_layer as conv_relu
def _get_lstm_cell(num_layers, lstm_dim, apply_dropout):
if isinstance(lstm_dim, list): # Different layers have different dimensions
if not len(lstm_dim) == num_layers:
raise ValueError('the length of lstm_dim must be equal to num_layers')
cell_list = []
for l in range(num_layers):
lstm_cell = tf.contrib.rnn.BasicLSTMCell(lstm_dim[l], state_is_tuple=True)
# Dropout is only applied on output of the 1st to second-last layer.
# The output of the last layer has no dropout
if apply_dropout and l < num_layers-1:
dropout_cell = tf.contrib.rnn.DropoutWrapper(lstm_cell,
output_keep_prob=0.5)
else:
dropout_cell = lstm_cell
cell_list.append(dropout_cell)
    else: # All layers have the same dimension.
lstm_cell = tf.contrib.rnn.BasicLSTMCell(lstm_dim, state_is_tuple=True)
# Dropout is only applied on output of the 1st to second-last layer.
# The output of the last layer has no dropout
if apply_dropout:
dropout_cell = tf.contrib.rnn.DropoutWrapper(lstm_cell,
output_keep_prob=0.5)
else:
dropout_cell = lstm_cell
cell_list = [dropout_cell] * (num_layers-1) + [lstm_cell]
cell = tf.contrib.rnn.MultiRNNCell(cell_list, state_is_tuple=True)
return cell
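# Hedged usage sketch for _get_lstm_cell (dimensions below are illustrative):
#   cell = _get_lstm_cell(num_layers=2, lstm_dim=[256, 512], apply_dropout=True)
#   outputs, states = tf.nn.dynamic_rnn(cell, embedded_seq, seq_lengths,
#                                       dtype=tf.float32, time_major=True)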
class AttentionSeq2Seq:
def __init__(self, input_seq_batch, seq_length_batch, T_decoder,
num_vocab_txt, embed_dim_txt, num_vocab_nmn, embed_dim_nmn,
lstm_dim, num_layers, EOS_token, encoder_dropout, decoder_dropout,
decoder_sampling, use_gt_layout=None, gt_layout_batch=None,
scope='encoder_decoder', reuse=None):
self.T_decoder = T_decoder
self.encoder_num_vocab = num_vocab_txt
self.encoder_embed_dim = embed_dim_txt
self.decoder_num_vocab = num_vocab_nmn
self.decoder_embed_dim = embed_dim_nmn
self.lstm_dim = lstm_dim
self.num_layers = num_layers
self.EOS_token = EOS_token
self.encoder_dropout = encoder_dropout
self.decoder_dropout = decoder_dropout
self.decoder_sampling = decoder_sampling
with tf.variable_scope(scope, reuse=reuse):
self._build_encoder(input_seq_batch, seq_length_batch)
self._build_decoder(use_gt_layout, gt_layout_batch)
def _build_encoder(self, input_seq_batch, seq_length_batch, scope='encoder',
reuse=None):
lstm_dim = self.lstm_dim
num_layers = self.num_layers
apply_dropout = self.encoder_dropout
with tf.variable_scope(scope, reuse=reuse):
T = tf.shape(input_seq_batch)[0]
N = tf.shape(input_seq_batch)[1]
self.T_encoder = T
self.N = N
embedding_mat = tf.get_variable('embedding_mat',
[self.encoder_num_vocab, self.encoder_embed_dim])
            # input_seq_batch has shape [T, N] and embedded_seq has shape [T, N, D].
embedded_seq = tf.nn.embedding_lookup(embedding_mat, input_seq_batch)
self.embedded_input_seq = embedded_seq
# The RNN
cell = _get_lstm_cell(num_layers, lstm_dim, apply_dropout)
# encoder_outputs has shape [T, N, lstm_dim]
encoder_outputs, encoder_states = tf.nn.dynamic_rnn(cell,
embedded_seq, seq_length_batch, dtype=tf.float32,
time_major=True, scope='lstm')
self.encoder_outputs = encoder_outputs
self.encoder_states = encoder_states
# transform the encoder outputs for further attention alignments
            # encoder_h_transformed has shape [T, N, lstm_dim]
encoder_h_transformed = fc('encoder_h_transform',
tf.reshape(encoder_outputs, [-1, lstm_dim]), output_dim=lstm_dim)
encoder_h_transformed = tf.reshape(encoder_h_transformed,
to_T([T, N, lstm_dim]))
self.encoder_h_transformed = encoder_h_transformed
# seq_not_finished is a shape [T, N, 1] tensor, where seq_not_finished[t, n]
# is 1 iff sequence n is not finished at time t, and 0 otherwise
seq_not_finished = tf.less(tf.range(T)[:, tf.newaxis, tf.newaxis],
seq_length_batch[:, tf.newaxis])
seq_not_finished = tf.cast(seq_not_finished, tf.float32)
self.seq_not_finished = seq_not_finished
def _build_decoder(self, use_gt_layout, gt_layout_batch, scope='decoder',
reuse=None):
        # The main difference from before is that the decoder now takes another
# input (the attention) when computing the next step
# T_max is the maximum length of decoded sequence (including <eos>)
#
# This function is for decoding only. It performs greedy search or sampling.
# the first input is <go> (its embedding vector) and the subsequent inputs
# are the outputs from previous time step
# num_vocab does not include <go>
#
        # use_gt_layout is None or a bool tensor, and gt_layout_batch is a tensor
# with shape [T_max, N].
# If use_gt_layout is not None, then when use_gt_layout is true, predict
# exactly the tokens in gt_layout_batch, regardless of actual probability.
# Otherwise, if sampling is True, sample from the token probability
# If sampling is False, do greedy decoding (beam size 1)
N = self.N
encoder_states = self.encoder_states
T_max = self.T_decoder
lstm_dim = self.lstm_dim
num_layers = self.num_layers
apply_dropout = self.decoder_dropout
EOS_token = self.EOS_token
sampling = self.decoder_sampling
with tf.variable_scope(scope, reuse=reuse):
embedding_mat = tf.get_variable('embedding_mat',
[self.decoder_num_vocab, self.decoder_embed_dim])
# we use a separate embedding for <go>, as it is only used in the
# beginning of the sequence
go_embedding = tf.get_variable('go_embedding', [1, self.decoder_embed_dim])
with tf.variable_scope('att_prediction'):
v = tf.get_variable('v', [lstm_dim])
W_a = tf.get_variable('weights', [lstm_dim, lstm_dim],
initializer=tf.contrib.layers.xavier_initializer())
b_a = tf.get_variable('biases', lstm_dim,
initializer=tf.constant_initializer(0.))
# The parameters to predict the next token
with tf.variable_scope('token_prediction'):
W_y = tf.get_variable('weights', [lstm_dim*2, self.decoder_num_vocab],
initializer=tf.contrib.layers.xavier_initializer())
b_y = tf.get_variable('biases', self.decoder_num_vocab,
initializer=tf.constant_initializer(0.))
# Attentional decoding
# Loop function is called at time t BEFORE the cell execution at time t,
# and its next_input is used as the input at time t (not t+1)
# c.f. https://www.tensorflow.org/api_docs/python/tf/nn/raw_rnn
mask_range = tf.reshape(
tf.range(self.decoder_num_vocab, dtype=tf.int32),
[1, -1])
all_eos_pred = EOS_token * tf.ones(to_T([N]), tf.int32)
all_one_prob = tf.ones(to_T([N]), tf.float32)
all_zero_entropy = tf.zeros(to_T([N]), tf.float32)
if use_gt_layout is not None:
gt_layout_mult = tf.cast(use_gt_layout, tf.int32)
pred_layout_mult = 1 - gt_layout_mult
def loop_fn(time, cell_output, cell_state, loop_state):
if cell_output is None: # time == 0
next_cell_state = encoder_states
next_input = tf.tile(go_embedding, to_T([N, 1]))
else: # time > 0
next_cell_state = cell_state
# compute the attention map over the input sequence
                    # att_raw has shape [T, N, 1]
att_raw = tf.reduce_sum(
tf.tanh(tf.nn.xw_plus_b(cell_output, W_a, b_a) +
self.encoder_h_transformed) * v,
axis=2, keep_dims=True)
# softmax along the first dimension (T) over not finished examples
# att has shape [T, N, 1]
att = tf.nn.softmax(att_raw, dim=0)*self.seq_not_finished
att = att / tf.reduce_sum(att, axis=0, keep_dims=True)
                    # d2 has shape [N, lstm_dim]
d2 = tf.reduce_sum(att*self.encoder_outputs, axis=0)
# token_scores has shape [N, num_vocab]
token_scores = tf.nn.xw_plus_b(
tf.concat([cell_output, d2], axis=1),
W_y, b_y)
# predict the next token (behavior depending on parameters)
if sampling:
# predicted_token has shape [N]
logits = token_scores
predicted_token = tf.cast(tf.reshape(
tf.multinomial(token_scores, 1), [-1]), tf.int32)
else:
# predicted_token has shape [N]
predicted_token = tf.cast(tf.argmax(token_scores, 1), tf.int32)
if use_gt_layout is not None:
predicted_token = (gt_layout_batch[time-1] * gt_layout_mult
+ predicted_token * pred_layout_mult)
# token_prob has shape [N], the probability of the predicted token
# although token_prob is not needed for predicting the next token
# it is needed in output (for policy gradient training)
# [N, num_vocab]
# mask has shape [N, num_vocab]
mask = tf.equal(mask_range, tf.reshape(predicted_token, [-1, 1]))
all_token_probs = tf.nn.softmax(token_scores)
token_prob = tf.reduce_sum(all_token_probs *
tf.cast(mask, tf.float32), axis=1)
neg_entropy = tf.reduce_sum(all_token_probs *
tf.log(tf.maximum(1e-5, all_token_probs)), axis=1)
# is_eos_predicted is a [N] bool tensor, indicating whether
# <eos> has already been predicted previously in each sequence
is_eos_predicted = loop_state[2]
predicted_token_old = predicted_token
# if <eos> has already been predicted, now predict <eos> with
# prob 1
predicted_token = tf.where(is_eos_predicted, all_eos_pred,
predicted_token)
token_prob = tf.where(is_eos_predicted, all_one_prob,
token_prob)
neg_entropy = tf.where(is_eos_predicted, all_zero_entropy, neg_entropy)
is_eos_predicted = tf.logical_or(is_eos_predicted,
tf.equal(predicted_token_old, EOS_token))
                    # the prediction comes from the cell output of the previous
                    # timestep (t-1); feed it as input into timestep t
next_input = tf.nn.embedding_lookup(embedding_mat, predicted_token)
elements_finished = tf.greater_equal(time, T_max)
# loop_state is a 5-tuple, representing
# 1) the predicted_tokens
# 2) the prob of predicted_tokens
# 3) whether <eos> has already been predicted
# 4) the negative entropy of policy (accumulated across timesteps)
# 5) the attention
if loop_state is None: # time == 0
# Write the predicted token into the output
predicted_token_array = tf.TensorArray(dtype=tf.int32, size=T_max,
infer_shape=False)
token_prob_array = tf.TensorArray(dtype=tf.float32, size=T_max,
infer_shape=False)
att_array = tf.TensorArray(dtype=tf.float32, size=T_max,
infer_shape=False)
next_loop_state = (predicted_token_array,
token_prob_array,
tf.zeros(to_T([N]), dtype=tf.bool),
tf.zeros(to_T([N]), dtype=tf.float32),
att_array)
else: # time > 0
t_write = time-1
next_loop_state = (loop_state[0].write(t_write, predicted_token),
loop_state[1].write(t_write, token_prob),
is_eos_predicted,
loop_state[3] + neg_entropy,
loop_state[4].write(t_write, att))
return (elements_finished, next_input, next_cell_state, cell_output,
next_loop_state)
# The RNN
cell = _get_lstm_cell(num_layers, lstm_dim, apply_dropout)
_, _, decodes_ta = tf.nn.raw_rnn(cell, loop_fn, scope='lstm')
predicted_tokens = decodes_ta[0].stack()
token_probs = decodes_ta[1].stack()
neg_entropy = decodes_ta[3]
# atts has shape [T_decoder, T_encoder, N, 1]
atts = decodes_ta[4].stack()
self.atts = atts
# word_vec has shape [T_decoder, N, 1]
word_vecs = tf.reduce_sum(atts*self.embedded_input_seq, axis=1)
predicted_tokens.set_shape([None, None])
token_probs.set_shape([None, None])
neg_entropy.set_shape([None])
word_vecs.set_shape([None, None, self.encoder_embed_dim])
self.predicted_tokens = predicted_tokens
self.token_probs = token_probs
self.neg_entropy = neg_entropy
self.word_vecs = word_vecs
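# Hedged construction sketch (vocabulary sizes and dimensions are placeholders):
#   model = AttentionSeq2Seq(input_seq_batch, seq_length_batch, T_decoder=20,
#       num_vocab_txt=10000, embed_dim_txt=300, num_vocab_nmn=20,
#       embed_dim_nmn=300, lstm_dim=512, num_layers=2, EOS_token=0,
#       encoder_dropout=True, decoder_dropout=True, decoder_sampling=False)
#   tokens = model.predicted_tokens  # [T_decoder, N]
#   probs = model.token_probs        # [T_decoder, N]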
| ronghanghu/n2nmn | models_shapes/nmn3_netgen_att.py | Python | bsd-2-clause | 14,699 |
"""
Tests for secure configuration
"""
from ConfigParser import ConfigParser
from unittest import TestCase
import logging
import re
import subprocess
import os
# Don't freak out about regular expression in string (for grep)
# pylint: disable=W1401
SSH_PORT = 22
class SecureConfig(TestCase):
"""
Test to ensure we don't shoot ourselves in the foot
"""
def setUp(self):
self._old_dir = os.getcwd()
os.chdir(os.getenv("ASIAQ_CONFIG", "."))
def tearDown(self):
os.chdir(self._old_dir)
def test_log_level_is_safe(self):
"""
Logging config does not contain debug logging
see https://docs.python.org/2/library/logging.html
"""
cmd = 'find . -type f -name "config.ini" | xargs grep -i "level.*=" | grep "\(NOTSET\|DEBUG\)"'
self.assertNotEqual(subprocess.call(cmd, shell=True), 0)
def test_yum_update(self):
"""
Ensure yum update is present in CentOS phase1 and not commented out
"""
self.assertEqual(subprocess.call(["grep", '^yum update -y', "init/centos6_phase1.sh"]), 0)
self.assertEqual(subprocess.call(["grep", '^yum update -y', "init/centos7_phase1.sh"]), 0)
def test_apt_update(self):
"""
Ensure apt-get update is present in Ubuntu phase1 and not commented out
"""
self.assertEqual(subprocess.call(["grep", '^apt-get update', "init/ubuntu_phase1.sh"]), 0)
def test_apt_upgrade(self):
"""
Ensure apt-get upgrade is present in Ubuntu phase1 and not commented out
"""
self.assertEqual(subprocess.call(["grep", '^apt-get upgrade', "init/ubuntu_phase1.sh"]), 0)
def _port_in_sg_rule(self, needle, haystack):
"""
        True if needle is part of the haystack port specification
"""
for ports in haystack:
ports = ports.split(":")
if len(ports) == 1 and needle == int(ports[0]):
return True
if len(ports) > 1 and needle >= int(ports[0]) and needle <= int(ports[1]):
return True
return False
def _allowed_ips(self):
"""
Return list of ips which ought to be able to ssh to production env
"""
daws_config_file = "disco_aws.ini"
daws_config = ConfigParser()
daws_config.read(daws_config_file)
deployenator = "mhcdiscodeployenator"
option = "eip@deploy"
return [daws_config.get(deployenator, option)] \
if daws_config.has_section(deployenator) and daws_config.has_option(deployenator, option) else []
def _prod_sg_rules(self):
"""
Return sg rules for all production networks
"""
vpc_config_file = "disco_vpc.ini"
vpc_config = ConfigParser()
vpc_config.read(vpc_config_file)
prod_section = "envtype:production"
self.assertTrue(vpc_config.has_section(prod_section))
# Since we only have one prod we shouldn't need duplicate config
# but if this changes in the future we'll need to adjust the check
# to inspect env and envtypes.
self.assertFalse(vpc_config.has_section("env:prod"))
sg_rule_names = [name for name in vpc_config.options(prod_section) if name.endswith("sg_rules")]
return [vpc_config.get(prod_section, name) for name in sg_rule_names]
def test_prod_ssh_sg_rules(self):
"""
Ensure that prod firewall rules don't allow ssh traffic.
"""
sg_rules = ",".join(self._prod_sg_rules())
allowed_ips = self._allowed_ips()
# We only allow port 22, TCP open from non-ip sources (other subnets)
# or deployenator host
source_regex = re.compile(r'[a-zA-Z]+[0-9a-zA-Z]+')
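        # Each rule is roughly "protocol source port [port|lo:hi ...]",
        # e.g. "tcp 10.0.0.0/8 22" or "tcp sg-deploy 80:90".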
for sg_rule in sg_rules.split(","):
sg_rule = sg_rule.split()
source = sg_rule[1]
# ssh protocol is 'tcp' or protocol number 6.
if (sg_rule[0] == 'tcp' or sg_rule[0] == '6') and \
self._port_in_sg_rule(SSH_PORT, sg_rule[2:]) and not source_regex.search(source):
try:
self.assertIn(source.split("/")[0], allowed_ips)
except AssertionError:
logging.exception("Production firewall has port 22 open to invalid host.")
raise
| amplifylitco/asiaq | tests/unit/test_secure_config.py | Python | bsd-2-clause | 4,350 |
'''
Test Battery for the SQL3 CSV Import
Created on Sep 18, 2013
@author: marcelo, [email protected]
@changelog:
'''
import unittest
import sys
import uuid
from cm2c.commons.gen.utils import get_tmp_fn
from cm2c.csvimport.sql3load import sql3load
#--
class Test(unittest.TestCase):
def setUp(self):
#sys.stderr.write("Creating sql3load class instance\n")
# self.s3_template = [{'name': 'text'}, {'age': 'integer'}, {'weigth': 'float'}]
self.s3_template = [ ('name', 'text'), ('age', 'integer'), ('weigth', 'float') ]
self.s4_template = [ ('asn', 'text'), ('prefix', 'text'), ('visible', 'integer') ]
        # self.s3l = sql3load(self.s3_template)
self.s3l = sql3load(self.s3_template, get_tmp_fn(".db") )
## end
def tearDown(self):
pass
## end
def testClassInstantiation(self):
# test relies on logic put in the setUp method
pass
def testRowInsertion(self):
r = self.s3l._insert_row({'name': 'marcelo', 'age': 41, 'weigth': 125.0})
self.assertTrue(r, "record not inserted succesfully")
r = self.s3l.get_rowcount()
self.assertEqual(r, 1, "rows should be exactly one, but instead count %s" % (r))
def testRowRetrieval(self):
self.s3l._insert_row({'name': 'marcelo', 'age': 41, 'weigth': 125.0})
r = self.s3l.query("1=1")
self.assertTrue(r, 'query did not return a valid value')
#sys.stderr.write(str(r[0]))
dr = dict(r[0])
# sys.stderr.write("%s" % (dr))
self.assertTrue( dr['age'] == 41, 'age should be 41' )
pass
def testImportCommaSeparatedFile(self):
r = self.s3l.importFile("test/test-import.txt")
self.assertTrue(r>0, "Number of lines read should be larger than 0 but is %s" % (r))
#
r = self.s3l.query("name = 'marcelo'")
self.assertTrue(r[0]['age']==41, "marcelo's age should be 41 but is %s" % (r[0]['age']))
def testImportTabSeparatedFile(self):
self.s3l2 = sql3load(self.s3_template, get_tmp_fn("islas.db"), "\t")
r = self.s3l2.importFile("test/test-import2.txt")
# print "imported rows %s" % (r)
self.assertTrue(r>3, "Number of lines read should be larger than 3 but is %s" % (r))
def testImportDifferentCommentMarkFile(self):
self.s3l2 = sql3load(self.s4_template, get_tmp_fn("riswhois.db"), " ", comments_mark='%')
r = self.s3l2.importFile("test/test-import3.txt")
# print "imported rows %s" % (r)
self.assertTrue(r>2, "Number of lines read should be larger than 2 but is %s" % (r))
#def testRowCount1(self):
# r = self.s3l.get_rowcount()
# self.assertEqual(r, 1, "rows should be exactly one, but instead count %s" % (r))
## end class Test
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
| carlosm3011/cm2c-python-lib | test/test_sql3load_p1.py | Python | bsd-2-clause | 2,886 |
"""
This is the default settings module.
The values of any variables that might differ between deploys of the package should either be None or should be
calculated at runtime. This file is under version control so it must work across all deploys. All of the tests should
pass or be skipped with no local settings overriding the values here.
What variables should be included in this file?
Any settings that are used by library code (i.e. not scripts) to run, especially if test code needs to check whether
some variable is None to determine whether corresponding hardware is present. On the other hand, if you need a hardware
setting just to run a script, it's better not add a None default here.
"""
import os as _os
import socket as _socket
# TODO: move away from allowing HOSTNAME to determine code paths in analysis; for data collection, use CRYOSTAT.
HOSTNAME = _socket.gethostname()
# This is the directory into which data will be written. The default should always exist so that test code can use it.
if _os.path.exists(_os.path.join('/data', _socket.gethostname())):
BASE_DATA_DIR = _os.path.join('/data', _socket.gethostname())
else:
BASE_DATA_DIR = '/tmp'
# The name of the cryostat.
CRYOSTAT = None
# Information about the current cooldown.
COOLDOWN = None
# The path of the directory containing log files.
LOG_DIR = '/tmp'
# The path of the directory containing temperature log files.
TEMPERATURE_LOG_DIR = None
# ROACH1
ROACH1_IP = None
ROACH1_VALON = None
ROACH1_HOST_IP = None
# ROACH2
ROACH2_IP = None
ROACH2_VALON = None
ROACH2_HOST_IP = None
ROACH2_GBE_HOST_IP = None
# Analog
MARK2_VALON = None
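# A deploy-specific settings module is expected to override these values;
# hypothetical example:
#   CRYOSTAT = 'my_cryostat'
#   ROACH1_IP = '192.168.1.21'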
| ColumbiaCMB/kid_readout | kid_readout/settings/_default.py | Python | bsd-2-clause | 1,635 |
from __future__ import unicode_literals
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Page.created'
db.add_column(u'pages_page', 'created',
self.gf('django.db.models.fields.DateTimeField')(null=True),
keep_default=False)
# Adding field 'Page.updated'
db.add_column(u'pages_page', 'updated',
self.gf('django.db.models.fields.DateTimeField')(null=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Page.created'
db.delete_column(u'pages_page', 'created')
# Deleting field 'Page.updated'
db.delete_column(u'pages_page', 'updated')
models = {
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'generic.assignedkeyword': {
'Meta': {'ordering': "('_order',)", 'object_name': 'AssignedKeyword'},
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keyword': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'assignments'", 'to': u"orm['generic.Keyword']"}),
'object_pk': ('django.db.models.fields.IntegerField', [], {})
},
u'generic.keyword': {
'Meta': {'object_name': 'Keyword'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'})
},
u'pages.link': {
'Meta': {'ordering': "('_order',)", 'object_name': 'Link', '_ormbases': [u'pages.Page']},
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'pages.page': {
'Meta': {'ordering': "('titles',)", 'object_name': 'Page'},
'_meta_title': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'_order': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'content_model': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'gen_description': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_menus': ('mezzanine.pages.fields.MenusField', [], {'default': '(1, 2, 3)', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'in_sitemap': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
#'keywords': ('mezzanine.generic.fields.KeywordsField', [], {'object_id_field': "'object_pk'", 'to': u"orm['generic.AssignedKeyword']", 'frozen_by_south': 'True'}),
'keywords_string': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['pages.Page']"}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'short_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['sites.Site']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'titles': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
u'pages.richtextpage': {
'Meta': {'ordering': "('_order',)", 'object_name': 'RichTextPage', '_ormbases': [u'pages.Page']},
'content': ('mezzanine.core.fields.RichTextField', [], {}),
u'page_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['pages.Page']", 'unique': 'True', 'primary_key': 'True'})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['pages']
| eRestin/MezzGIS | mezzanine/pages/migrations/0014_auto__add_field_page_created__add_field_page_updated.py | Python | bsd-2-clause | 6,196 |
#!/usr/bin/env python
import urlparse
from flup.server.fcgi import WSGIServer
import connections
import httplog
import get_next_lines
######################################################################
######################################################################
## ##
## Constants. ##
## ##
######################################################################
######################################################################
# Address to bind to.
BIND_ADDRESS = ('127.0.0.1', 9000)
# Default number of lines.
DEFAULT_LINES = 100
# Not found error page.
NOT_FOUND = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\
<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \
\"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd\">\
<html xmlns=\"http://www.w3.org/1999/xhtml\">\
<head>\
<title>Not found</title>\
</head>\
<body>\
<h1>HTTP/1.1 404 Not found</h1>\
</body>\
</html>"
######################################################################
######################################################################
## ##
## myapp. ##
## ##
######################################################################
######################################################################
def myapp(environ, start_response):
# Get QUERY_STRING.
query_string = environ.get("QUERY_STRING")
    if query_string is None:
start_response("404 Not found", [("Content-Type", "text/html")])
return [NOT_FOUND]
args = urlparse.parse_qs(query_string)
arg = args.get("script")
    if arg is None:
start_response("404 Not found", [("Content-Type", "text/html")])
return [NOT_FOUND]
script = arg[0]
if script == "connections":
return connections.connections(start_response)
elif script == "httplog":
arg = args.get("lines", [DEFAULT_LINES])[0]
try:
nlines = int(arg)
except ValueError:
nlines = DEFAULT_LINES
return httplog.httplog(start_response, nlines)
elif script == "get_next_lines":
arg = args.get("date")
        if arg is None:
start_response("404 Not found", [("Content-Type", "text/html")])
return [NOT_FOUND]
date = arg[0]
if len(date) != 8:
start_response("404 Not found", [("Content-Type", "text/html")])
return [NOT_FOUND]
arg = args.get("offset")
        if arg is None:
start_response("404 Not found", [("Content-Type", "text/html")])
return [NOT_FOUND]
try:
offset = int(arg[0])
except ValueError:
start_response("404 Not found", [("Content-Type", "text/html")])
return [NOT_FOUND]
arg = args.get("lines", [DEFAULT_LINES])[0]
try:
nlines = int(arg)
except ValueError:
nlines = DEFAULT_LINES
return get_next_lines.get_next_lines(start_response, date, offset, nlines)
else:
start_response("404 Not found", [("Content-Type", "text/html")])
return [NOT_FOUND]
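# Example requests handled by myapp (illustrative query strings; the date and
# offset values below are placeholders):
#   ?script=connections
#   ?script=httplog&lines=50
#   ?script=get_next_lines&date=20240101&offset=0&lines=100
# Anything else, or a malformed parameter, yields the 404 page above.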
if __name__ == "__main__":
WSGIServer(myapp, bindAddress = BIND_ADDRESS, multiplexed = True).run()
| guidoreina/gsniffer | html/dispatch.py | Python | bsd-2-clause | 3,532 |
from django.contrib import admin
from models import Click
class ClickAdmin(admin.ModelAdmin):
list_display = ('content_object', 'content_type', 'referer', 'user', 'date')
list_display_links = ('content_object',)
list_filter = ('date', 'referer','user')
admin.site.register(Click, ClickAdmin)
| anscii/django-goto-track | goto_track/admin.py | Python | bsd-2-clause | 306 |
import numpy as np
from collections import deque
# Build a 2D grid from an unlabeled set of lenslet centers. This
# method is ad-hoc, but seems to work fairly reliably.
#
# Here we iterate over the putative matches, looking for pixels that
# are within 2 pixels of the EXPECTED_LENSLET_SIZE. This seems to be a
# very reliable way of finding neighboring lenslets. Note that we
# throw away any pixels here that don't have exactly four matches.
#
def build_grid(putative_centers, expected_lenslet_size, adjacency_tolerance, b_estimate_rotation = True):
# Optionally, first estimate the rotation of the lenslet grid using random sampling (+-45 degrees)
lenslet_rotation = 0
ROTATION_SAMPLE_SIZE = 400
if b_estimate_rotation:
import random
if len(putative_centers) < ROTATION_SAMPLE_SIZE:
print "ERROR: there are not enough lenslet centers to perform calibration. Check you calibration image and try again."
raise SystemExit
rand_centers = random.sample(putative_centers, ROTATION_SAMPLE_SIZE)
rotation_samples = []
for rc in rand_centers:
diff = np.cast['float32'](putative_centers-np.tile(rc,
(putative_centers.shape[0],1)))
dist = np.abs(np.sqrt(np.sum(diff**2,1))-expected_lenslet_size)
adj = np.nonzero(dist < adjacency_tolerance)[0]
for a in adj:
dr = putative_centers[a][0] - rc[0]
dc = putative_centers[a][1] - rc[1]
theta = np.arctan2(dc,dr)
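                # Fold the neighbor direction into [-pi/4, pi/4): grid
                # neighbors repeat every 90 degrees, so only the residual
                # rotation of the lenslet lattice matters here.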
theta = ((theta + (5*np.pi/4)) % (np.pi/2)) - (np.pi/4)
rotation_samples.append(theta)
lenslet_rotation = np.median(rotation_samples)
print( '\t--> Estimating lenslet rotation (n=%d): %f degrees' %
(len(rotation_samples),
np.rad2deg(lenslet_rotation)) )
else:
print( '\t--> Assuming lenslet basis aligned with sensor.')
# First we find the lenslet closest to the center.
num_putative_lenslets = putative_centers.shape[0]
center_of_mass = np.mean(putative_centers,0)
diff = putative_centers - np.tile(center_of_mass, (num_putative_lenslets,1))
dist = np.abs(np.sqrt(np.sum(diff**2,1)))
center_lenslet = np.argmin(dist)
print ( '\t--> Found center lenslet %d near pixel [ %d %d ]' %
(center_lenslet,
putative_centers[center_lenslet,1],
putative_centers[center_lenslet,0]) )
# Pull off the first entry to get things started and mark it as touched
touched = np.zeros(putative_centers.shape[0])
pending = deque( [(putative_centers[center_lenslet,:], 0, 0)] )
touched[center_lenslet] = 1
# Walk the graph, numbering lenslets as they are reached from
    # the central node. Our goal here is to give each lenslet a
# unique 2D coordinate on a 2D grid, where the center lenslet
# is (0,0). This is also fairly efficient, since we rapidly
# narrow down the number of lenslets we have to search through
# as the algorithm proceeds.
iter_count = 0
lenslets = []
while (len(pending) > 0):
current_match = pending.popleft()
current_center = current_match[0]
r = current_match[1]
c = current_match[2]
# OPTIMIZATION: We periodically prune the putative_centers
# list and reset the touched list, since this
# significantly increases the speed of matching towards
# the end.
if (iter_count % 300 == 0):
untouched_entries = np.nonzero(touched == 0)[0]
putative_centers = putative_centers[untouched_entries,:]
touched = np.zeros(putative_centers.shape[0])
if (iter_count % 1000 == 0):
print ('\t Iteration %d at [%d, %d] ( %d pending, %d left in match pool )' %
(iter_count, c, r, len(pending), len(putative_centers)))
iter_count = iter_count + 1
# Appending this lenslet to the master list along with its location in the grid
lenslets.append((current_center[0], current_center[1], r, c))
# Now find the neighbors of this lenslet, and add them to the pending queue.
diff = np.cast['float32'](putative_centers-np.tile(current_center,
(putative_centers.shape[0],1)))
dist = np.abs(np.sqrt(np.sum(diff**2,1))-expected_lenslet_size)
matches = np.nonzero(dist < adjacency_tolerance)[0]
for m in matches:
if not touched[m]:
touched[m] = 1
dr = putative_centers[m][0] - current_center[0]
dc = putative_centers[m][1] - current_center[1]
dr_rot = dr*np.cos(-lenslet_rotation) - dc*np.sin(-lenslet_rotation)
dc_rot = dr*np.sin(-lenslet_rotation) + dc*np.cos(-lenslet_rotation)
#print dr, dc, dr_rot, dc_rot
if np.abs(dc_rot) < adjacency_tolerance:
if dr_rot > 0:
# Up (row+1, col)
pending.append((putative_centers[m],r+1,c))
else:
# Down (row-1, col)
pending.append((putative_centers[m],r-1,c))
elif np.abs(dr_rot) < adjacency_tolerance:
if dc_rot > 0:
# Right (row, col+1)
pending.append((putative_centers[m],r,c+1))
else:
# Left (row, col-1)
pending.append((putative_centers[m],r,c-1))
else:
# Silently discard any lenslets that aren't
# either immediately above or below this
# lenslet (to within a 2 pixel margin).
pass
# Pick one of the lenslets and compute roughly what lenslet it is
# in the original image. We use this information to recenter the
# lenslet transform. This is not perfect, btw, but does roughly
# center the rectified image where the old image was.
l = lenslets[0]
import math
minr = -math.floor(l[0] / expected_lenslet_size)
minc = -math.floor(l[1] / expected_lenslet_size)
# Recenter the lenslet coordinates. Note that we add an
# additional 0.5 lenslet offset so that the lenslet centers
# themselves appear at [0.5,0.5], [0.5, 1.5], ... etc.
#
print '\t--> Recentering lenslets by [%d, %d].' % (minc, minr)
adjusted_lenslets = np.zeros((len(lenslets),4))
m = 0
for l in lenslets:
adjusted_lenslets[m,:] = np.array([l[0], l[1], float(l[2]-minr-0.5), float(l[3]-minc-0.5)])
m += 1
return adjusted_lenslets
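# Example usage (illustrative; `centers` would be an (N, 2) array of detected
# lenslet centers in pixel coordinates, and the size/tolerance values are
# placeholders):
#
#   lenslets = build_grid(centers, expected_lenslet_size=17.0,
#                         adjacency_tolerance=2.0)
#   # Columns 0-1 hold pixel coordinates; columns 2-3 hold the recentered
#   # (row, col) grid coordinates of each lenslet.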
| sophie63/FlyLFM | stanford_lfanalyze_v0.4/lflib/calibration/grid.py | Python | bsd-2-clause | 6,790 |
"""Minimal class for planetary ellipsoids"""
from math import sqrt
class Ellipsoid:
"""
generate reference ellipsoid parameters
https://en.wikibooks.org/wiki/PROJ.4#Spheroid
https://nssdc.gsfc.nasa.gov/planetary/factsheet/index.html
as everywhere else in this program, distance units are METERS
"""
def __init__(self, model: str = "wgs84"):
"""
feel free to suggest additional ellipsoids
Parameters
----------
model : str
name of ellipsoid
"""
if model == "wgs84":
"""https://en.wikipedia.org/wiki/World_Geodetic_System#WGS84"""
self.semimajor_axis = 6378137.0
self.semiminor_axis = 6356752.31424518
elif model == "wgs72":
self.semimajor_axis = 6378135.0
self.semiminor_axis = 6356750.52001609
elif model == "grs80":
"""https://en.wikipedia.org/wiki/GRS_80"""
self.semimajor_axis = 6378137.0
self.semiminor_axis = 6356752.31414036
elif model == "clarke1866":
self.semimajor_axis = 6378206.4
self.semiminor_axis = 6356583.8
elif model == "mars":
"""
https://tharsis.gsfc.nasa.gov/geodesy.html
"""
self.semimajor_axis = 3396900.0
self.semiminor_axis = 3376097.80585952
elif model == "moon":
self.semimajor_axis = 1738000.0
self.semiminor_axis = self.semimajor_axis
elif model == "venus":
self.semimajor_axis = 6051000.0
self.semiminor_axis = self.semimajor_axis
elif model == "jupiter":
self.semimajor_axis = 71492000.0
self.semiminor_axis = 66770054.3475922
elif model == "io":
"""
https://doi.org/10.1006/icar.1998.5987
"""
            # NB: expressed in meters for consistency with the other models;
            # the cited source lists 1829.7 km and 1815.8 km.
            self.semimajor_axis = 1829700.0
            self.semiminor_axis = 1815800.0
elif model == "pluto":
self.semimajor_axis = 1187000.0
self.semiminor_axis = self.semimajor_axis
else:
raise NotImplementedError(
f"{model} model not implemented, let us know and we will add it (or make a pull request)"
)
self.flattening = (self.semimajor_axis - self.semiminor_axis) / self.semimajor_axis
self.thirdflattening = (self.semimajor_axis - self.semiminor_axis) / (
self.semimajor_axis + self.semiminor_axis
)
self.eccentricity = sqrt(2 * self.flattening - self.flattening ** 2)
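# Example (illustrative):
#   wgs84 = Ellipsoid("wgs84")
#   wgs84.flattening # ~0.0033528, i.e. about 1/298.257
#   moon = Ellipsoid("moon")
#   moon.eccentricity # 0.0 for a spherical body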
| scienceopen/pymap3d | src/pymap3d/ellipsoid.py | Python | bsd-2-clause | 2,581 |
import os
import logging
from django.conf import settings
from django.contrib.gis.db import models
from django.core.exceptions import ValidationError
from django.core.validators import MinValueValidator
from django.template.defaultfilters import slugify
from django.utils.translation import get_language, ugettext_lazy as _
import simplekml
from mapentity.models import MapEntityMixin
from mapentity.serializers import plain_text
from geotrek.authent.models import StructureRelated
from geotrek.core.models import Path, Topology
from geotrek.common.utils import intersecting, classproperty
from geotrek.common.mixins import (PicturesMixin, PublishableMixin,
PictogramMixin, OptionalPictogramMixin)
from geotrek.common.models import Theme
from geotrek.maintenance.models import Intervention, Project
from geotrek.tourism import models as tourism_models
from .templatetags import trekking_tags
logger = logging.getLogger(__name__)
class Trek(StructureRelated, PicturesMixin, PublishableMixin, MapEntityMixin, Topology):
topo_object = models.OneToOneField(Topology, parent_link=True,
db_column='evenement')
departure = models.CharField(verbose_name=_(u"Departure"), max_length=128, blank=True,
help_text=_(u"Departure description"), db_column='depart')
arrival = models.CharField(verbose_name=_(u"Arrival"), max_length=128, blank=True,
help_text=_(u"Arrival description"), db_column='arrivee')
description_teaser = models.TextField(verbose_name=_(u"Description teaser"), blank=True,
help_text=_(u"A brief summary (map pop-ups)"), db_column='chapeau')
description = models.TextField(verbose_name=_(u"Description"), blank=True, db_column='description',
help_text=_(u"Complete description"))
ambiance = models.TextField(verbose_name=_(u"Ambiance"), blank=True, db_column='ambiance',
help_text=_(u"Main attraction and interest"))
access = models.TextField(verbose_name=_(u"Access"), blank=True, db_column='acces',
help_text=_(u"Best way to go"))
disabled_infrastructure = models.TextField(verbose_name=_(u"Disabled infrastructure"), db_column='handicap',
blank=True, help_text=_(u"Any specific infrastructure"))
duration = models.FloatField(verbose_name=_(u"Duration"), default=0, blank=True, db_column='duree',
help_text=_(u"In decimal hours (ex. 1.5 for 1 h 30)"),
validators=[MinValueValidator(0)])
is_park_centered = models.BooleanField(verbose_name=_(u"Is in the midst of the park"), db_column='coeur',
help_text=_(u"Crosses center of park"))
advised_parking = models.CharField(verbose_name=_(u"Advised parking"), max_length=128, blank=True, db_column='parking',
help_text=_(u"Where to park"))
parking_location = models.PointField(verbose_name=_(u"Parking location"), db_column='geom_parking',
srid=settings.SRID, spatial_index=False, blank=True, null=True)
public_transport = models.TextField(verbose_name=_(u"Public transport"), blank=True, db_column='transport',
help_text=_(u"Train, bus (see web links)"))
advice = models.TextField(verbose_name=_(u"Advice"), blank=True, db_column='recommandation',
help_text=_(u"Risks, danger, best period, ..."))
themes = models.ManyToManyField(Theme, related_name="treks",
db_table="o_r_itineraire_theme", blank=True, null=True, verbose_name=_(u"Themes"),
help_text=_(u"Main theme(s)"))
networks = models.ManyToManyField('TrekNetwork', related_name="treks",
db_table="o_r_itineraire_reseau", blank=True, null=True, verbose_name=_(u"Networks"),
help_text=_(u"Hiking networks"))
practice = models.ForeignKey('Practice', related_name="treks",
blank=True, null=True, verbose_name=_(u"Practice"), db_column='pratique')
accessibilities = models.ManyToManyField('Accessibility', related_name="treks",
db_table="o_r_itineraire_accessibilite", blank=True, null=True,
verbose_name=_(u"Accessibilities"))
route = models.ForeignKey('Route', related_name='treks',
blank=True, null=True, verbose_name=_(u"Route"), db_column='parcours')
difficulty = models.ForeignKey('DifficultyLevel', related_name='treks',
blank=True, null=True, verbose_name=_(u"Difficulty"), db_column='difficulte')
web_links = models.ManyToManyField('WebLink', related_name="treks",
db_table="o_r_itineraire_web", blank=True, null=True, verbose_name=_(u"Web links"),
help_text=_(u"External resources"))
related_treks = models.ManyToManyField('self', through='TrekRelationship',
verbose_name=_(u"Related treks"), symmetrical=False,
help_text=_(u"Connections between treks"),
related_name='related_treks+') # Hide reverse attribute
parent = models.ForeignKey('self', verbose_name=_(u"Parent"), db_column='parent', blank=True, null=True,
related_name='children')
information_desks = models.ManyToManyField(tourism_models.InformationDesk, related_name='treks',
db_table="o_r_itineraire_renseignement", blank=True, null=True,
verbose_name=_(u"Information desks"),
help_text=_(u"Where to obtain information"))
points_reference = models.MultiPointField(verbose_name=_(u"Points of reference"), db_column='geom_points_reference',
srid=settings.SRID, spatial_index=False, blank=True, null=True)
source = models.ManyToManyField('common.RecordSource',
null=True, blank=True, related_name='treks',
verbose_name=_("Source"), db_table='o_r_itineraire_source')
eid = models.CharField(verbose_name=_(u"External id"), max_length=128, blank=True, db_column='id_externe')
eid2 = models.CharField(verbose_name=_(u"Second external id"), max_length=128, blank=True, db_column='id_externe2')
objects = Topology.get_manager_cls(models.GeoManager)()
category_id_prefix = 'T'
class Meta:
db_table = 'o_t_itineraire'
verbose_name = _(u"Trek")
verbose_name_plural = _(u"Treks")
ordering = ['name']
def __unicode__(self):
return self.name
@models.permalink
def get_document_public_url(self):
""" Override ``geotrek.common.mixins.PublishableMixin``
"""
return ('trekking:trek_document_public', [], {'lang': get_language(), 'pk': self.pk, 'slug': self.slug})
@property
def related(self):
return self.related_treks.exclude(deleted=True).exclude(pk=self.pk).distinct()
@classproperty
def related_verbose_name(cls):
return _("Related treks")
@property
def relationships(self):
# Does not matter if a or b
return TrekRelationship.objects.filter(trek_a=self)
@property
def published_relationships(self):
return self.relationships.filter(trek_b__published=True)
@property
def poi_types(self):
if settings.TREKKING_TOPOLOGY_ENABLED:
# Can't use values_list and must add 'ordering' because of bug:
# https://code.djangoproject.com/ticket/14930
values = self.pois.values('ordering', 'type')
else:
values = self.pois.values('type')
pks = [value['type'] for value in values]
return POIType.objects.filter(pk__in=set(pks))
@property
def length_kilometer(self):
return "%.1f" % (self.length / 1000.0)
@property
def networks_display(self):
return ', '.join([unicode(n) for n in self.networks.all()])
@property
def districts_display(self):
return ', '.join([unicode(d) for d in self.districts])
@property
def themes_display(self):
return ', '.join([unicode(n) for n in self.themes.all()])
@property
def city_departure(self):
cities = self.cities
return unicode(cities[0]) if len(cities) > 0 else ''
def kml(self):
""" Exports trek into KML format, add geometry as linestring and POI
as place marks """
kml = simplekml.Kml()
# Main itinerary
geom3d = self.geom_3d.transform(4326, clone=True) # KML uses WGS84
line = kml.newlinestring(name=self.name,
description=plain_text(self.description),
coords=geom3d.coords)
line.style.linestyle.color = simplekml.Color.red # Red
line.style.linestyle.width = 4 # pixels
# Place marks
for poi in self.pois:
place = poi.geom_3d.transform(settings.API_SRID, clone=True)
kml.newpoint(name=poi.name,
description=plain_text(poi.description),
coords=[place.coords])
return kml._genkml()
def has_geom_valid(self):
"""A trek should be a LineString, even if it's a loop.
"""
return super(Trek, self).has_geom_valid() and self.geom.geom_type.lower() == 'linestring'
@property
def duration_pretty(self):
return trekking_tags.duration(self.duration)
@classproperty
def duration_pretty_verbose_name(cls):
return _("Formated duration")
@classmethod
def path_treks(cls, path):
treks = cls.objects.existing().filter(aggregations__path=path)
# The following part prevents conflict with default trek ordering
# ProgrammingError: SELECT DISTINCT ON expressions must match initial ORDER BY expressions
return treks.order_by('topo_object').distinct('topo_object')
@classmethod
def topology_treks(cls, topology):
if settings.TREKKING_TOPOLOGY_ENABLED:
qs = cls.overlapping(topology)
else:
area = topology.geom.buffer(settings.TREK_POI_INTERSECTION_MARGIN)
qs = cls.objects.existing().filter(geom__intersects=area)
return qs
@classmethod
def published_topology_treks(cls, topology):
return cls.topology_treks(topology).filter(published=True)
# Rando v1 compat
@property
def usages(self):
return [self.practice] if self.practice else []
@classmethod
def get_create_label(cls):
return _(u"Add a new trek")
@property
def children_id(self):
return list(self.children.order_by('name').values_list('id', flat=True))
@property
def previous_id(self):
if self.parent is None:
return None
children = self.parent.children_id
try:
return children[children.index(self.id) - 1]
except IndexError:
return None
@property
def next_id(self):
if self.parent is None:
return None
children = self.parent.children_id
try:
return children[children.index(self.id) + 1]
except IndexError:
return None
def clean(self):
if self.parent and self.parent == self:
raise ValidationError(_(u"Cannot use itself as parent trek."))
if self.parent and self.parent.parent:
raise ValidationError(_(u"Cannot use a a child trek as parent trek."))
@property
def prefixed_category_id(self):
if settings.SPLIT_TREKS_CATEGORIES_BY_PRACTICE and self.practice:
return '{prefix}{id}'.format(prefix=self.category_id_prefix, id=self.practice.id)
else:
return self.category_id_prefix
def distance(self, to_cls):
if self.practice and self.practice.distance is not None:
return self.practice.distance
else:
return settings.TOURISM_INTERSECTION_MARGIN
def is_public(self):
return self.any_published or (self.parent and self.parent.any_published)
def save(self, *args, **kwargs):
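        # On update, compare old and new geometries with equals_exact and drop
        # unchanged geom/geom_3d from update_fields so identical geometries are
        # not needlessly rewritten.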
if self.pk is not None and kwargs.get('update_fields', None) is None:
field_names = set()
for field in self._meta.concrete_fields:
if not field.primary_key and not hasattr(field, 'through'):
field_names.add(field.attname)
old_trek = Trek.objects.get(pk=self.pk)
if self.geom is not None and old_trek.geom.equals_exact(self.geom, tolerance=0.00001):
field_names.remove('geom')
if self.geom_3d is not None and old_trek.geom_3d.equals_exact(self.geom_3d, tolerance=0.00001):
field_names.remove('geom_3d')
return super(Trek, self).save(update_fields=field_names, *args, **kwargs)
super(Trek, self).save(*args, **kwargs)
Path.add_property('treks', Trek.path_treks, _(u"Treks"))
Topology.add_property('treks', Trek.topology_treks, _(u"Treks"))
if settings.HIDE_PUBLISHED_TREKS_IN_TOPOLOGIES:
Topology.add_property('published_treks', lambda self: [], _(u"Published treks"))
else:
Topology.add_property('published_treks', lambda self: intersecting(Trek, self).filter(published=True), _(u"Published treks"))
Intervention.add_property('treks', lambda self: self.topology.treks if self.topology else [], _(u"Treks"))
Project.add_property('treks', lambda self: self.edges_by_attr('treks'), _(u"Treks"))
tourism_models.TouristicContent.add_property('treks', lambda self: intersecting(Trek, self), _(u"Treks"))
tourism_models.TouristicContent.add_property('published_treks', lambda self: intersecting(Trek, self).filter(published=True), _(u"Published treks"))
tourism_models.TouristicEvent.add_property('treks', lambda self: intersecting(Trek, self), _(u"Treks"))
tourism_models.TouristicEvent.add_property('published_treks', lambda self: intersecting(Trek, self).filter(published=True), _(u"Published treks"))
class TrekRelationshipManager(models.Manager):
use_for_related_fields = True
def get_queryset(self):
# Select treks foreign keys by default
qs = super(TrekRelationshipManager, self).get_queryset().select_related('trek_a', 'trek_b')
# Exclude deleted treks
return qs.exclude(trek_a__deleted=True).exclude(trek_b__deleted=True)
class TrekRelationship(models.Model):
"""
Relationships between treks : symmetrical aspect is managed by a trigger that
duplicates all couples (trek_a, trek_b)
"""
has_common_departure = models.BooleanField(verbose_name=_(u"Common departure"), db_column='depart_commun', default=False)
has_common_edge = models.BooleanField(verbose_name=_(u"Common edge"), db_column='troncons_communs', default=False)
is_circuit_step = models.BooleanField(verbose_name=_(u"Circuit step"), db_column='etape_circuit', default=False)
trek_a = models.ForeignKey(Trek, related_name="trek_relationship_a", db_column='itineraire_a')
trek_b = models.ForeignKey(Trek, related_name="trek_relationship_b", db_column='itineraire_b', verbose_name=_(u"Trek"))
objects = TrekRelationshipManager()
class Meta:
db_table = 'o_r_itineraire_itineraire'
verbose_name = _(u"Trek relationship")
verbose_name_plural = _(u"Trek relationships")
unique_together = ('trek_a', 'trek_b')
def __unicode__(self):
return u"%s <--> %s" % (self.trek_a, self.trek_b)
@property
def relation(self):
return u"%s %s%s%s" % (
self.trek_b.name_display,
_("Departure") if self.has_common_departure else '',
_("Path") if self.has_common_edge else '',
_("Circuit") if self.is_circuit_step else ''
)
@property
def relation_display(self):
return self.relation
class TrekNetwork(PictogramMixin):
network = models.CharField(verbose_name=_(u"Name"), max_length=128, db_column='reseau')
class Meta:
db_table = 'o_b_reseau'
verbose_name = _(u"Trek network")
verbose_name_plural = _(u"Trek networks")
ordering = ['network']
def __unicode__(self):
return self.network
class Practice(PictogramMixin):
name = models.CharField(verbose_name=_(u"Name"), max_length=128, db_column='nom')
distance = models.IntegerField(verbose_name=_(u"Distance"), blank=True, null=True, db_column='distance',
help_text=_(u"Touristic contents and events will associate within this distance (meters)"))
cirkwi = models.ForeignKey('cirkwi.CirkwiLocomotion', verbose_name=_(u"Cirkwi locomotion"), null=True, blank=True)
class Meta:
db_table = 'o_b_pratique'
verbose_name = _(u"Practice")
verbose_name_plural = _(u"Practices")
ordering = ['name']
def __unicode__(self):
return self.name
@property
def slug(self):
return slugify(self.name) or str(self.pk)
class Accessibility(OptionalPictogramMixin):
name = models.CharField(verbose_name=_(u"Name"), max_length=128, db_column='nom')
cirkwi = models.ForeignKey('cirkwi.CirkwiTag', verbose_name=_(u"Cirkwi tag"), null=True, blank=True)
id_prefix = 'A'
class Meta:
db_table = 'o_b_accessibilite'
verbose_name = _(u"Accessibility")
verbose_name_plural = _(u"Accessibilities")
ordering = ['name']
def __unicode__(self):
return self.name
@property
def prefixed_id(self):
return '{prefix}{id}'.format(prefix=self.id_prefix, id=self.id)
@property
def slug(self):
return slugify(self.name) or str(self.pk)
class Route(OptionalPictogramMixin):
route = models.CharField(verbose_name=_(u"Name"), max_length=128, db_column='parcours')
class Meta:
db_table = 'o_b_parcours'
verbose_name = _(u"Route")
verbose_name_plural = _(u"Routes")
ordering = ['route']
def __unicode__(self):
return self.route
class DifficultyLevel(OptionalPictogramMixin):
"""We use an IntegerField for id, since we want to edit it in Admin.
This column is used to order difficulty levels, especially in public website
where treks are filtered by difficulty ids.
"""
id = models.IntegerField(primary_key=True)
difficulty = models.CharField(verbose_name=_(u"Difficulty level"),
max_length=128, db_column='difficulte')
cirkwi_level = models.IntegerField(verbose_name=_(u"Cirkwi level"), blank=True, null=True,
db_column='niveau_cirkwi', help_text=_(u"Between 1 and 8"))
cirkwi = models.ForeignKey('cirkwi.CirkwiTag', verbose_name=_(u"Cirkwi tag"), null=True, blank=True)
class Meta:
db_table = 'o_b_difficulte'
verbose_name = _(u"Difficulty level")
verbose_name_plural = _(u"Difficulty levels")
ordering = ['id']
def __unicode__(self):
return self.difficulty
def save(self, *args, **kwargs):
"""Manually auto-increment ids"""
if not self.id:
try:
last = self.__class__.objects.all().order_by('-id')[0]
self.id = last.id + 1
except IndexError:
self.id = 1
super(DifficultyLevel, self).save(*args, **kwargs)
class WebLinkManager(models.Manager):
def get_queryset(self):
return super(WebLinkManager, self).get_queryset().select_related('category')
class WebLink(models.Model):
name = models.CharField(verbose_name=_(u"Name"), max_length=128, db_column='nom')
url = models.URLField(verbose_name=_(u"URL"), max_length=128, db_column='url')
category = models.ForeignKey('WebLinkCategory', verbose_name=_(u"Category"),
related_name='links', null=True, blank=True,
db_column='categorie')
objects = WebLinkManager()
class Meta:
db_table = 'o_t_web'
verbose_name = _(u"Web link")
verbose_name_plural = _(u"Web links")
ordering = ['name']
def __unicode__(self):
category = "%s - " % self.category.label if self.category else ""
return u"%s%s (%s)" % (category, self.name, self.url)
@classmethod
@models.permalink
def get_add_url(cls):
return ('trekking:weblink_add', )
class WebLinkCategory(PictogramMixin):
label = models.CharField(verbose_name=_(u"Label"), max_length=128, db_column='nom')
class Meta:
db_table = 'o_b_web_category'
verbose_name = _(u"Web link category")
verbose_name_plural = _(u"Web link categories")
ordering = ['label']
def __unicode__(self):
return u"%s" % self.label
class POIManager(models.GeoManager):
def get_queryset(self):
return super(POIManager, self).get_queryset().select_related('type', 'structure')
class POI(StructureRelated, PicturesMixin, PublishableMixin, MapEntityMixin, Topology):
topo_object = models.OneToOneField(Topology, parent_link=True,
db_column='evenement')
description = models.TextField(verbose_name=_(u"Description"), db_column='description',
help_text=_(u"History, details, ..."))
type = models.ForeignKey('POIType', related_name='pois', verbose_name=_(u"Type"), db_column='type')
eid = models.CharField(verbose_name=_(u"External id"), max_length=128, blank=True, db_column='id_externe')
class Meta:
db_table = 'o_t_poi'
verbose_name = _(u"POI")
verbose_name_plural = _(u"POI")
# Override default manager
objects = Topology.get_manager_cls(POIManager)()
def __unicode__(self):
return u"%s (%s)" % (self.name, self.type)
@models.permalink
def get_document_public_url(self):
""" Override ``geotrek.common.mixins.PublishableMixin``
"""
return ('trekking:poi_document_public', [], {'lang': get_language(), 'pk': self.pk, 'slug': self.slug})
def save(self, *args, **kwargs):
super(POI, self).save(*args, **kwargs)
# Invalidate treks map
for trek in self.treks.all():
try:
os.remove(trek.get_map_image_path())
except OSError:
pass
@property
def type_display(self):
return unicode(self.type)
@property
def serializable_type(self):
return {'label': self.type.label,
'pictogram': self.type.get_pictogram_url()}
@classmethod
def path_pois(cls, path):
return cls.objects.existing().filter(aggregations__path=path).distinct('pk')
@classmethod
def topology_pois(cls, topology):
if settings.TREKKING_TOPOLOGY_ENABLED:
qs = cls.overlapping(topology)
else:
area = topology.geom.buffer(settings.TREK_POI_INTERSECTION_MARGIN)
qs = cls.objects.existing().filter(geom__intersects=area)
return qs
@classmethod
def published_topology_pois(cls, topology):
return cls.topology_pois(topology).filter(published=True)
def distance(self, to_cls):
return settings.TOURISM_INTERSECTION_MARGIN
Path.add_property('pois', POI.path_pois, _(u"POIs"))
Topology.add_property('pois', POI.topology_pois, _(u"POIs"))
Topology.add_property('published_pois', POI.published_topology_pois, _(u"Published POIs"))
Intervention.add_property('pois', lambda self: self.topology.pois if self.topology else [], _(u"POIs"))
Project.add_property('pois', lambda self: self.edges_by_attr('pois'), _(u"POIs"))
tourism_models.TouristicContent.add_property('pois', lambda self: intersecting(POI, self), _(u"POIs"))
tourism_models.TouristicContent.add_property('published_pois', lambda self: intersecting(POI, self).filter(published=True), _(u"Published POIs"))
tourism_models.TouristicEvent.add_property('pois', lambda self: intersecting(POI, self), _(u"POIs"))
tourism_models.TouristicEvent.add_property('published_pois', lambda self: intersecting(POI, self).filter(published=True), _(u"Published POIs"))
class POIType(PictogramMixin):
label = models.CharField(verbose_name=_(u"Label"), max_length=128, db_column='nom')
cirkwi = models.ForeignKey('cirkwi.CirkwiPOICategory', verbose_name=_(u"Cirkwi POI category"), null=True, blank=True)
class Meta:
db_table = 'o_b_poi'
verbose_name = _(u"POI type")
verbose_name_plural = _(u"POI types")
ordering = ['label']
def __unicode__(self):
return self.label
class ServiceType(PictogramMixin, PublishableMixin):
practices = models.ManyToManyField('Practice', related_name="services",
db_table="o_r_service_pratique", blank=True, null=True,
verbose_name=_(u"Practices"))
class Meta:
db_table = 'o_b_service'
verbose_name = _(u"Service type")
verbose_name_plural = _(u"Service types")
ordering = ['name']
def __unicode__(self):
return self.name
class ServiceManager(models.GeoManager):
def get_queryset(self):
return super(ServiceManager, self).get_queryset().select_related('type', 'structure')
class Service(StructureRelated, MapEntityMixin, Topology):
topo_object = models.OneToOneField(Topology, parent_link=True,
db_column='evenement')
type = models.ForeignKey('ServiceType', related_name='services', verbose_name=_(u"Type"), db_column='type')
eid = models.CharField(verbose_name=_(u"External id"), max_length=128, blank=True, db_column='id_externe')
class Meta:
db_table = 'o_t_service'
verbose_name = _(u"Service")
verbose_name_plural = _(u"Services")
# Override default manager
objects = Topology.get_manager_cls(ServiceManager)()
def __unicode__(self):
return unicode(self.type)
@property
def name(self):
return self.type.name
@property
def name_display(self):
s = u'<a data-pk="%s" href="%s" title="%s">%s</a>' % (self.pk,
self.get_detail_url(),
self.name,
self.name)
if self.type.published:
s = u'<span class="badge badge-success" title="%s">☆</span> ' % _("Published") + s
elif self.type.review:
s = u'<span class="badge badge-warning" title="%s">☆</span> ' % _("Waiting for publication") + s
return s
@classproperty
def name_verbose_name(cls):
return _("Type")
@property
def type_display(self):
return unicode(self.type)
@property
def serializable_type(self):
return {'label': self.type.label,
'pictogram': self.type.get_pictogram_url()}
@classmethod
def path_services(cls, path):
return cls.objects.existing().filter(aggregations__path=path).distinct('pk')
@classmethod
def topology_services(cls, topology):
if settings.TREKKING_TOPOLOGY_ENABLED:
qs = cls.overlapping(topology)
else:
area = topology.geom.buffer(settings.TREK_POI_INTERSECTION_MARGIN)
qs = cls.objects.existing().filter(geom__intersects=area)
if isinstance(topology, Trek):
qs = qs.filter(type__practices=topology.practice)
return qs
@classmethod
def published_topology_services(cls, topology):
return cls.topology_services(topology).filter(type__published=True)
def distance(self, to_cls):
return settings.TOURISM_INTERSECTION_MARGIN
Path.add_property('services', Service.path_services, _(u"Services"))
Topology.add_property('services', Service.topology_services, _(u"Services"))
Topology.add_property('published_services', Service.published_topology_services, _(u"Published Services"))
Intervention.add_property('services', lambda self: self.topology.services if self.topology else [], _(u"Services"))
Project.add_property('services', lambda self: self.edges_by_attr('services'), _(u"Services"))
tourism_models.TouristicContent.add_property('services', lambda self: intersecting(Service, self), _(u"Services"))
tourism_models.TouristicContent.add_property('published_services', lambda self: intersecting(Service, self).filter(published=True), _(u"Published Services"))
tourism_models.TouristicEvent.add_property('services', lambda self: intersecting(Service, self), _(u"Services"))
tourism_models.TouristicEvent.add_property('published_services', lambda self: intersecting(Service, self).filter(published=True), _(u"Published Services"))
| johan--/Geotrek | geotrek/trekking/models.py | Python | bsd-2-clause | 29,407 |
import re
import base64
from anillo.http.responses import Response
class HttpBasicAuthBackend:
def __init__(self, auth_func):
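        # NB: this pattern expects "Basic: <base64>"; standard HTTP Basic auth
        # (RFC 7617) sends "Basic <base64>" with no colon, so the regex would
        # need adjusting to interoperate with standard clients.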
self.auth_regex = re.compile(r"^Basic: (.*)$")
self.auth_func = auth_func
def parse(self, request):
authorization = request.headers.get("Authorization", None)
if authorization is None:
return None
match = re.match(self.auth_regex, authorization)
if match is None:
return None
try:
auth_data = base64.b64decode(match.group(1).encode("utf-8")).decode()
(username, password) = auth_data.split(":")
return {"username": username, "password": password}
except Exception:
return None
def authenticate(self, request, data):
identity = self.auth_func(data["username"], data["password"])
if isinstance(identity, Response):
return identity
request.identity = identity
return request
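# Minimal usage sketch (illustrative; `check_credentials` is a hypothetical
# lookup returning an identity dict, or None on failure):
#
#   def check_credentials(username, password):
#       return {"username": username} if password == "s3cret" else None
#
#   backend = HttpBasicAuthBackend(check_credentials)
#   data = backend.parse(request)
#   if data is not None:
#       request = backend.authenticate(request, data)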
| jespino/anillo-auth | anillo_auth/backends/http_auth.py | Python | bsd-2-clause | 998 |
from django.db import models
class Stub(models.Model):
"""
A stub model for testing check_migrations.
    Has an incomplete set of migrations: the `age` field is missing.
"""
name = models.CharField(max_length=255)
age = models.IntegerField()
| incuna/incuna-test-utils | tests/partial_migrations/models.py | Python | bsd-2-clause | 265 |
from ailment.expression import Load, Const
from .base import PeepholeOptimizationExprBase
class ConstantDereferences(PeepholeOptimizationExprBase):
"""
Dereferences constant memory loads from read-only memory regions.
"""
__slots__ = ()
name = "Dereference constant references"
expr_classes = (Load, )
def optimize(self, expr: Load):
if isinstance(expr.addr, Const):
# is it loading from a read-only section?
sec = self.project.loader.find_section_containing(expr.addr.value)
if sec is not None and sec.is_readable and not sec.is_writable:
# do we know the value that it's reading?
try:
val = self.project.loader.memory.unpack_word(expr.addr.value, size=self.project.arch.bytes)
except KeyError:
return expr
return Const(None, None, val, expr.bits, **expr.tags)
return None
| angr/angr | angr/analyses/decompiler/peephole_optimizations/constant_derefs.py | Python | bsd-2-clause | 967 |
# -*- coding: UTF-8 -*-
# Copyright 2017-2020 Rumma & Ko Ltd
# License: GNU Affero General Public License v3 (see file COPYING for details)
from django.db import models
from django.conf import settings
from django.utils.translation import gettext_lazy as _
from lino.api import dd, rt
from lino import mixins
from lino.modlib.users.mixins import UserAuthored
from .roles import TrendsStaff, TrendsUser
class TrendArea(mixins.BabelNamed):
class Meta:
app_label = 'trends'
abstract = dd.is_abstract_model(__name__, 'TrendArea')
verbose_name = _("Trend area")
verbose_name_plural = _("Trend areas")
class TrendAreas(dd.Table):
required_roles = dd.login_required(TrendsStaff)
model = 'trends.TrendArea'
column_names = 'name *'
detail_layout = """
id name
StagesByArea
"""
class TrendStage(mixins.BabelNamed, mixins.Referrable):
ref_max_length = 20
class Meta:
app_label = 'trends'
abstract = dd.is_abstract_model(__name__, 'TrendStage')
verbose_name = _("Trend stage")
verbose_name_plural = _("Trend stages")
trend_area = dd.ForeignKey('trends.TrendArea', blank=True, null=True)
subject_column = models.BooleanField(_("Subject column"), default=False)
class TrendStages(dd.Table):
required_roles = dd.login_required(TrendsUser)
model = 'trends.TrendStage'
column_names = 'ref trend_area name subject_column *'
order_by = ['ref', 'trend_area', 'name']
stay_in_grid = True
insert_layout = """
name
ref trend_area
"""
detail_layout = dd.DetailLayout("""
ref trend_area id
name
EventsByStage
""")
class StagesByArea(TrendStages):
master_key = 'trend_area'
column_names = "ref name *"
class TrendEvent(UserAuthored):
class Meta:
app_label = 'trends'
abstract = dd.is_abstract_model(__name__, 'TrendEvent')
verbose_name = _("Trend event")
verbose_name_plural = _("Trend events")
# unique_together = ['subject', 'trend_stage']
subject = dd.ForeignKey(
dd.plugins.trends.subject_model,
related_name="trend_events")
event_date = models.DateField(_("Date"), default=dd.today)
trend_area = dd.ForeignKey('trends.TrendArea')
trend_stage = dd.ForeignKey(
'trends.TrendStage', blank=True, null=True)
remark = models.CharField(_("Remark"), max_length=200, blank=True)
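    # Restrict the trend_stage dropdown to stages of the selected trend_area.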
@dd.chooser()
def trend_stage_choices(self, trend_area):
if not trend_area:
return rt.models.trends.TrendStage.objects.none()
return rt.models.trends.TrendStage.objects.filter(
trend_area=trend_area)
def full_clean(self):
if self.trend_stage_id:
if not self.trend_area_id:
self.trend_area = self.trend_stage.trend_area
super(TrendEvent, self).full_clean()
class TrendEvents(dd.Table):
required_roles = dd.login_required(TrendsUser)
model = 'trends.TrendEvent'
order_by = ['event_date', 'id']
class EventsByStage(TrendEvents):
label = _("Trend events")
master_key = 'trend_stage'
column_names = "event_date subject remark * subject__*"
class EventsBySubject(TrendEvents):
master_key = 'subject'
column_names = "event_date trend_area trend_stage remark *"
class AllTrendEvents(TrendEvents):
required_roles = dd.login_required(TrendsStaff)
| lino-framework/xl | lino_xl/lib/trends/models.py | Python | bsd-2-clause | 3,415 |
import os
from unittest import TestCase
from nose.tools import istest
from envparser import Parser
class ParserTest(TestCase):
def setUp(self):
self.basefile = os.path.abspath(os.path.join(os.path.dirname(__file__), 'fixtures', 'base.cfg'))
@istest
def gets_string_from_configuration(self):
parser = Parser(self.basefile)
name = parser.get('name')
self.assertEqual(name, 'John Doe')
@istest
def gets_string_from_configuration_default_if_section_and_file_dont_exist(self):
parser = Parser(self.basefile, 'weirdsection')
name = parser.get('name')
self.assertEqual(name, 'John Doe')
@istest
def gets_string_from_another_environment_if_section_exists(self):
parser = Parser(self.basefile, 'dev')
name = parser.get('name')
self.assertEqual(name, 'John Doe Dev')
@istest
def gets_string_from_another_environment_if_file_exists(self):
parser = Parser(self.basefile, 'live')
name = parser.get('name')
self.assertEqual(name, 'John Doe Live')
@istest
def gets_string_from_another_environment_if_file_and_section_exists(self):
parser = Parser(self.basefile, 'tests')
name = parser.get('name')
self.assertEqual(name, 'John Doe Tests')
@istest
def gets_integer_from_configuration(self):
parser = Parser(self.basefile)
age = parser.getint('age')
self.assertEqual(age, 30)
@istest
def gets_integer_from_configuration_default_if_doesnt_exist_in_section(self):
parser = Parser(self.basefile, 'dev')
age = parser.getint('age')
self.assertEqual(age, 30)
@istest
def gets_integer_from_configuration_default_if_section_and_file_dont_exist(self):
parser = Parser(self.basefile, 'weirdsection')
name = parser.getint('age')
self.assertEqual(name, 30)
@istest
def gets_float_from_configuration(self):
parser = Parser(self.basefile)
salary = parser.getfloat('salary')
self.assertEqual(salary, 560.00)
@istest
def gets_string_from_another_environment_using_same_extension_as_base_file(self):
parser = Parser(os.path.abspath(os.path.join(os.path.dirname(__file__), 'fixtures', 'base.ini')), 'live')
name = parser.get('name')
self.assertEqual(name, 'John Doe Live INI')
@istest
def gets_boolean_from_configuration(self):
parser = Parser(self.basefile)
is_masculin = parser.getboolean('is_masculin')
self.assertTrue(is_masculin) | diogobaeder/envparser | tests/test_parser.py | Python | bsd-2-clause | 2,599 |
# coding:utf-8
from __future__ import unicode_literals
from django.views.generic import DetailView, CreateView, ListView, UpdateView
from django.utils import timezone
from django.http import HttpResponse
import datetime
import ujson
import json
from app.myblog.models import Article
from src.dss.Mixin import *
from src.dss.Serializer import *
def test(req):
article_list = Article.objects.all()
article = article_list[0]
# print article._meta.get_all_field_names()
# print getattr(article, 'tags').all()[0].tags_art.all()
json_data = serializer(article, datetime_format='timestamp', output_type='json', deep=True, many=True, exclude_attr=['comments','tags'])
return HttpResponse(json_data, content_type='application/json')
class ArticleDetailView(JsonResponseMixin, DetailView):
model = Article
datetime_type = 'timestamp'
many = True
foreign = True
exclude_attr = ('comments', )
fields = ['caption', 'content', 'classification', 'tags']
queryset = Article.objects.all()
# slug_url_kwarg = '32'
pk_url_kwarg = 'id'
success_url = '/'
# context_object_name = 'article_list'
# queryset = Article.objects.filter(publish=False)
# ordering = '-create_time'
# paginate_orphans = 1
paginate_by = 2
# slug_field = 'id'
template_name = 'test_cbv.html'
# http_method_names = [u'get', u'post', u'put', u'patch', u'delete', u'head', u'options', u'trace', u'link']
def get_context_data(self, **kwargs):
context = super(ArticleDetailView, self).get_context_data(**kwargs)
context['now'] = timezone.now()
print context
# print context['page_obj'].paginator.page_range
return context
#
# # def get_object(self, queryset=None):
# # return Article.objects.get(id=32)
#
# def get(self, request, *args, **kwargs):
# self.kwargs['id'] = u'32'
# return super(ArticleDetailView, self).get(request, *args, **kwargs)
#
# # def get(self, request, *args, **kwargs):
# # obj = self.get_object()
# # json_obj = model_serializer(obj, serializer='json')
# # return HttpResponse(json_obj, content_type='application/json')
#
# def delete(self, request, *args, **kwargs):
# return HttpResponse('delete')
#
# def post(self, request, *args, **kwargs):
# return HttpResponse('post')
#
# def link(self, request, *args, **kwargs):
# return HttpResponse('link')
| bluedazzle/django-angularjs-blog | app/blog_lab/cbv/views.py | Python | bsd-2-clause | 2,594 |
import glob
import hashlib
import json
import os
from ..core import private
from ..interface import Plugin
from ..descriptors import Link, OwnerName
class BaseDataStore(Plugin):
pass
def iswritable(directory):
"""
Check if `directory` is writable.
Examples
--------
.. Run the code below in a clean temporary directory:
>>> getfixture('cleancwd')
>>> iswritable('.')
True
>>> os.path.exists('spam')
False
>>> iswritable('spam/egg')
True
>>> os.access('/', os.W_OK | os.X_OK)
False
>>> os.path.exists('/spam')
False
>>> iswritable('/spam/egg')
False
"""
parent = os.path.realpath(directory)
cur = os.path.join(parent, '_dummy_')
while parent != cur:
if os.path.exists(parent):
if os.access(parent, os.W_OK | os.X_OK):
return True
else:
return False
cur, parent = parent, os.path.dirname(parent)
class DirectoryDataStore(BaseDataStore):
"""
Data-store using a directory.
Examples
--------
.. Run the code below in a clean temporary directory:
>>> getfixture('cleancwd')
>>> from compapp.core import Parametric
>>> class MyParametric(Parametric):
... datastore = DirectoryDataStore
...
>>> mp = MyParametric()
>>> mp.datastore.dir = 'out'
>>> mp.datastore.path('file')
'out/file'
`.path` creates intermediate directories if required:
>>> os.listdir('.')
['out']
If a :term:`nested class` uses `DirectoryDataStore`, the path is
automatically allocated under the `.dir` of the :term:`owner
class`.
>>> class MyParametric(Parametric):
... datastore = DirectoryDataStore
...
... class nested(Parametric):
... datastore = DirectoryDataStore
...
>>> mp = MyParametric()
>>> mp.datastore.dir = 'out'
>>> mp.nested.datastore.path()
'out/nested'
>>> mp.nested.datastore.path('file')
'out/nested/file'
>>> mp.nested.datastore.path('dir', 'file')
'out/nested/dir/file'
>>> mp.nested.datastore.dir = 'another'
>>> mp.nested.datastore.path('file')
'another/file'
"""
_parent = Link('...datastore')
_ownername = OwnerName()
overwrite = True
clear_before_run = True
on = True
@property
def dir(self):
"""
Path to datastore directory (optional).
"""
if not self.on:
return None
try:
return self._dir
except AttributeError:
pass
try:
parentdir = self._parent.dir
ownername = self._ownername
except AttributeError:
return None
if parentdir is None:
return None
return os.path.join(parentdir, ownername)
@dir.setter
def dir(self, value):
assert isinstance(value, str)
self._dir = value
def is_writable(self):
return self.dir and iswritable(self.dir)
def prepare(self):
if hasattr(self, '_dir') and not iswritable(self._dir):
raise RuntimeError("Directory {0} is not writable."
.format(self._dir))
def path(self, *args, **kwds):
"""
Path relative to the base directory `.dir`.
Parameters
----------
args : str
Path relative to `.dir`.
It will be joined by `os.path.join`.
Keyword Arguments
-----------------
mkdir : bool
If `True` (default), make the parent directory of returned
`path` (i.e., ``os.path.dirname(path)``, not the `path`
itself).
Returns
-------
path : str
``os.path.join(self.dir, *args)``
"""
def makepath(args, mkdir=True):
path = os.path.join(self.dir, *args)
dirname = os.path.dirname(path)
if mkdir and not os.path.isdir(dirname):
os.makedirs(dirname)
return path
return makepath(args, **kwds)
def exists(self, *path):
return self.dir and os.path.exists(self.path(*path, mkdir=False))
def globitems(self, pattern):
files = glob.glob(self.path(pattern, mkdir=False))
for path in files:
yield os.path.basename(path), path
class SubDataStore(DirectoryDataStore):
"""
Data-store using sub-paths of parent data-store.
Examples
--------
.. Run the code below in a clean temporary directory:
>>> getfixture('cleancwd')
>>> from compapp.core import Parametric
>>> class MyParametric(Parametric):
... datastore = DirectoryDataStore
...
... class nested(Parametric):
... datastore = SubDataStore
...
>>> mp = MyParametric()
>>> mp.datastore.dir = 'out'
>>> mp.nested.datastore.path()
'out/nested'
>>> mp.nested.datastore.path('file')
'out/nested-file'
>>> mp.nested.datastore.path('dir', 'file')
'out/nested-dir/file'
>>> mp.nested.datastore.path('a', 'b', 'c')
'out/nested-a/b/c'
>>> mp.nested.datastore.sep = '.'
>>> mp.nested.datastore.path('file')
'out/nested.file'
Since `DirectoryDataStore` already can be used for datastore using
sub-directories, this class is specialized for the case using
files under the directory of parent datastore. If the
:term:`owner class` of this datastore uses only a few files, it
makes sense to not allocate a directory and to use this type of
datastore.
"""
# MAYBE: this class should be called ParasiteDataStore?
dir = Link('._parent.dir')
_parent = Link('...datastore')
_ownername = OwnerName()
sep = '-'
def path(self, *args, **kwds):
# but how about List/Dict?
if not args:
return self._parent.path(self._ownername)
part0 = self._ownername + self.sep + args[0]
return self._parent.path(part0, *args[1:], **kwds)
def globitems(self, pattern):
for filename, path in super(SubDataStore, self).globitems(pattern):
yield filename[len(self._ownername + self.sep):], path
def hexdigest(jsonable):
"""
Calculate hex digest of a `jsonable` object.
>>> hexdigest({'a': 1, 'b': 2, 'c': 3})
'e20096b15530bd66a35a7332619f6666e2322070'
"""
string = json.dumps(jsonable, sort_keys=True).encode()
return hashlib.sha1(string).hexdigest()
class HashDataStore(DirectoryDataStore):
"""
Automatically allocated data-store based on hash of parameter.
Examples
--------
.. Run the code below in a clean temporary directory:
>>> getfixture('cleancwd')
>>> from compapp.core import Parametric
>>> class MyParametric(Parametric):
... datastore = HashDataStore
... a = 1
...
>>> mp = MyParametric()
>>> mp.datastore.prepare()
>>> mp.datastore.dir
'Data/memo/be/a393597a3c5518cad18a4c96c08d038de3f00a'
>>> mp.a = 2
>>> mp.datastore.prepare()
>>> mp.datastore.dir
'Data/memo/a2/722afcdc53688843b61b8d71329fabab16b6ae'
>>> mp.datastore.basedir = '.'
>>> mp.datastore.prepare()
>>> mp.datastore.dir
'./a2/722afcdc53688843b61b8d71329fabab16b6ae'
"""
basedir = os.path.join('Data', 'memo')
def ownerhash(self):
owner = private(self).owner
params = owner.params(nested=True)
del params['datastore']
cls = type(owner)
name = cls.__module__ + '.' + cls.__name__
return hexdigest([name, params])
def prepare(self):
digest = self.ownerhash()
self.dir = os.path.join(self.basedir, digest[:2], digest[2:])
| tkf/compapp | src/compapp/plugins/datastores.py | Python | bsd-2-clause | 7,725 |
#from interface.services.icontainer_agent import ContainerAgentClient
#from pyon.ion.endpoint import ProcessRPCClient
from ion.services.sa.test.helpers import any_old
from pyon.util.containers import DotDict
from pyon.util.int_test import IonIntegrationTestCase
from interface.services.sa.iinstrument_management_service import InstrumentManagementServiceClient
from interface.services.sa.iobservatory_management_service import ObservatoryManagementServiceClient
from interface.services.coi.iresource_registry_service import ResourceRegistryServiceClient
from pyon.public import RT, PRED, log
from nose.plugins.attrib import attr
from ion.util.related_resources_crawler import RelatedResourcesCrawler
from interface.objects import OrgTypeEnum
RT_SITE = "Site"
RT_SUBPLATFORMSITE = "SubPlatformSite"
class ResourceHelper(object):
def create_any(self, resourcetype, first, label=None):
rsrc_id, rev = self.RR.create(any_old(resourcetype))
if label is None: label = resourcetype
if first:
if label in self.care:
self.fail("tried to add a duplicate %s" % label)
log.debug("Creating %s as %s", resourcetype, label)
self.care[label] = rsrc_id
self.realtype[label] = resourcetype
self.assertIn(label, self.care)
self.assertIn(label, self.realtype)
else:
if not resourcetype in self.dontcare: self.dontcare[resourcetype] = []
self.dontcare[resourcetype].append(rsrc_id)
return rsrc_id
def create_observatory(self, first=False, create_with_marine_facility=False):
obs_id = self.create_any(RT.Observatory, first)
if create_with_marine_facility:
org = any_old(RT.Org)
org.org_type = OrgTypeEnum.MARINE_FACILITY
rsrc_id, rev = self.RR.create(org)
aid = self.RR.create_association(subject=rsrc_id, predicate=PRED.hasResource, object=obs_id)
site_id1 = self.create_site(first)
site_id2 = self.create_site(False)
self.RR.create_association(subject=obs_id, predicate=PRED.hasSite, object=site_id1)
self.RR.create_association(subject=obs_id, predicate=PRED.hasSite, object=site_id2)
return obs_id
def create_site(self, first=False):
site_id = self.create_any(RT.Subsite, first, RT_SITE)
subsite_id1 = self.create_subsite(first)
subsite_id2 = self.create_subsite(False)
self.RR.create_association(subject=site_id, predicate=PRED.hasSite, object=subsite_id1)
self.RR.create_association(subject=site_id, predicate=PRED.hasSite, object=subsite_id2)
return site_id
def create_subsite(self, first=False):
subsite_id = self.create_any(RT.Subsite, first)
platformsite_id1 = self.create_platformsite(first)
platformsite_id2 = self.create_platformsite(False)
self.RR.create_association(subject=subsite_id, predicate=PRED.hasSite, object=platformsite_id1)
self.RR.create_association(subject=subsite_id, predicate=PRED.hasSite, object=platformsite_id2)
return subsite_id
def create_platformsite(self, first=False):
platform_model_id = self.create_any(RT.PlatformModel, False) # we never care about this level
platformsite_id = self.create_any(RT.PlatformSite, first)
subplatformsite_id1 = self.create_subplatformsite(first)
subplatformsite_id2 = self.create_subplatformsite(False)
self.RR.create_association(subject=platformsite_id, predicate=PRED.hasSite, object=subplatformsite_id1)
self.RR.create_association(subject=platformsite_id, predicate=PRED.hasSite, object=subplatformsite_id2)
self.RR.create_association(subject=platformsite_id, predicate=PRED.hasModel, object=platform_model_id)
return platformsite_id
def create_subplatformsite(self, first=False):
platform_model_id = self.create_any(RT.PlatformModel, first)
subplatformsite_id = self.create_any(RT.PlatformSite, first, RT_SUBPLATFORMSITE)
self.RR.create_association(subject=subplatformsite_id, predicate=PRED.hasModel, object=platform_model_id)
platformdevice_id = self.create_platform_device(platform_model_id, first)
self.RR.create_association(subject=subplatformsite_id, predicate=PRED.hasDevice, object=platformdevice_id)
instrumentsite_id1 = self.create_instrumentsite(platformdevice_id, first)
instrumentsite_id2 = self.create_instrumentsite(platformdevice_id, False)
self.RR.create_association(subject=subplatformsite_id, predicate=PRED.hasSite, object=instrumentsite_id1)
self.RR.create_association(subject=subplatformsite_id, predicate=PRED.hasSite, object=instrumentsite_id2)
return subplatformsite_id
def create_instrumentsite(self, platform_device_id, first=False):
instrument_model_id = self.create_any(RT.InstrumentModel, first)
instrumentsite_id = self.create_any(RT.InstrumentSite, first)
self.RR.create_association(subject=instrumentsite_id, predicate=PRED.hasModel, object=instrument_model_id)
instrument_device_id = self.create_instrumentdevice(instrument_model_id, first)
self.RR.create_association(subject=platform_device_id, predicate=PRED.hasDevice, object=instrument_device_id)
self.RR.create_association(subject=instrumentsite_id, predicate=PRED.hasDevice, object=instrument_device_id)
return instrumentsite_id
def create_platform_device(self, platform_model_id, first=False):
platformdevice_id = self.create_any(RT.PlatformDevice, first)
self.RR.create_association(subject=platformdevice_id, predicate=PRED.hasModel, object=platform_model_id)
return platformdevice_id
def create_instrumentdevice(self, instrument_model_id, first=False):
instrumentdevice_id = self.create_any(RT.InstrumentDevice, first)
self.RR.create_association(subject=instrumentdevice_id, predicate=PRED.hasModel, object=instrument_model_id)
return instrumentdevice_id
@attr('INT', group='sa')
class TestFindRelatedResources(IonIntegrationTestCase, ResourceHelper):
"""
assembly integration tests at the service level
"""
def setUp(self):
# Start container
self._start_container()
self.container.start_rel_from_url('res/deploy/r2deploy.yml')
self.IMS = InstrumentManagementServiceClient(node=self.container.node)
self.OMS = ObservatoryManagementServiceClient(node=self.container.node)
self.RR = ResourceRegistryServiceClient(node=self.container.node)
self.care = {}
self.dontcare = {}
self.realtype = {}
# @unittest.skip('this test just for debugging setup')
# def test_just_the_setup(self):
# return
def create_dummy_structure(self):
"""
Create two observatories.
- each observatory has 2 subsites
- each subsite has 2 more subsites
- each of those subsites has 2 platform sites
- each of those platform sites has a model and 2 sub- platform sites
- each of those sub- platform sites has a model, matching platform device, and 2 instrument sites
- each of those instrument sites has a model and matching instrument device
One of each resource type (observatory all the way down to instrument device/model) is what we "care" about
- it goes in the self.care dict
All the rest go in the self.dontcare dict
To manage subsite/platform multiplicity, we alias them in the dict... the proper hierarchy is:
Observatory-Site-Subsite-PlatformSite-SubPlatformSite-InstrumentSite
self.realtype[alias] gives the real resource type of an alias
"""
self.create_observatory(True)
self.create_observatory(False)
for rt in [RT.Observatory, RT_SITE, RT.Subsite,
RT.PlatformSite, RT_SUBPLATFORMSITE, RT.PlatformDevice, RT.PlatformModel,
RT.InstrumentSite, RT.InstrumentDevice, RT.InstrumentModel
]:
self.assertIn(rt, self.care)
self.expected_associations = [
(RT.Observatory, PRED.hasSite, RT_SITE),
            (RT_SITE, PRED.hasSite, RT.Subsite),
(RT.Subsite, PRED.hasSite, RT.PlatformSite),
(RT.PlatformSite, PRED.hasSite, RT_SUBPLATFORMSITE),
(RT_SUBPLATFORMSITE, PRED.hasSite, RT.InstrumentSite),
(RT_SUBPLATFORMSITE, PRED.hasModel, RT.PlatformModel),
(RT_SUBPLATFORMSITE, PRED.hasDevice, RT.PlatformDevice),
(RT.PlatformDevice, PRED.hasModel, RT.PlatformModel),
(RT.InstrumentSite, PRED.hasModel, RT.InstrumentModel),
(RT.InstrumentSite, PRED.hasDevice, RT.InstrumentDevice),
(RT.InstrumentDevice, PRED.hasModel, RT.InstrumentModel)
]
log.info("Verifying created structure")
for (st, p, ot) in self.expected_associations:
rst = self.realtype[st]
rot = self.realtype[ot]
s = self.care[st]
o = self.care[ot]
log.debug("searching %s->%s->%s as %s->%s->%s" % (st, p, ot, rst, p, rot))
log.debug(" - expecting %s %s" % (rot, o))
a = self.RR.find_associations(subject=s, predicate=p, object=o)
if not (0 < len(a) < 3):
a2 = self.RR.find_associations(subject=s, predicate=p)
a2content = [("(%s %s)" % (alt.ot, alt.o)) for alt in a2]
self.fail("Expected 1-2 associations for %s->%s->%s, got %s: %s" % (st, p, ot, len(a2), a2content))
self.assertIn(o, [aa.o for aa in a])
log.info("CREATED STRUCTURE APPEARS CORRECT ===============================")
def simplify_assn_resource_ids(self, assn_list):
count = 0
lookup = {}
retval = []
for a in assn_list:
if not a.s in lookup:
lookup[a.s] = count
count += 1
if not a.o in lookup:
lookup[a.o] = count
count += 1
retval.append(DotDict({"s":lookup[a.s], "st":a.st, "p":a.p, "o":lookup[a.o], "ot":a.ot}))
return retval
def describe_assn_graph(self, assn_list):
return [("%s %s -> %s -> %s %s" % (a.st, a.s, a.p, a.ot, a.o)) for a in assn_list]
#@unittest.skip('refactoring')
def test_related_resource_crawler(self):
"""
"""
self.create_dummy_structure()
r = RelatedResourcesCrawler()
# test the basic forward-backward searches
for (st, p, ot) in self.expected_associations:
rst = self.realtype[st]
rot = self.realtype[ot]
s = self.care[st]
o = self.care[ot]
test_sto_fn = r.generate_get_related_resources_fn(self.RR, [rot], {p: (True, False)})
sto_crawl = test_sto_fn(s, 1) # depth of 1
if 2 < len(sto_crawl): # we get 2 because of care/dontcare
self.fail("got %s" % self.describe_assn_graph(self.simplify_assn_resource_ids(sto_crawl)))
self.assertIn(o, [t.o for t in sto_crawl])
test_ots_fn = r.generate_get_related_resources_fn(self.RR, [rst], {p: (False, True)})
ots_crawl = test_ots_fn(o, 1) # depth of 1
if 1 != len(ots_crawl):
self.fail("got %s" % self.describe_assn_graph(self.simplify_assn_resource_ids(ots_crawl)))
# test a nontrivial lookup, in which we extract resources related to an instrument device
rw = []
pd = {}
# we want things related to an instrument device
rw.append(RT.PlatformModel)
rw.append(RT.InstrumentModel)
rw.append(RT.PlatformDevice)
rw.append(RT.InstrumentSite)
rw.append(RT.PlatformSite)
rw.append(RT.Subsite)
rw.append(RT.Observatory)
rw.append(RT.InstrumentDevice)
pd[PRED.hasModel] = (True, True)
pd[PRED.hasDevice] = (False, True)
pd[PRED.hasSite] = (False, True)
test_real_fn = r.generate_get_related_resources_fn(self.RR, resource_whitelist=rw, predicate_dictionary=pd)
related = test_real_fn(self.care[RT.InstrumentDevice])
log.debug("========= Result is:")
for l in self.describe_assn_graph(self.simplify_assn_resource_ids(related)):
log.debug(" %s", l)
# check that we only got things we care about
for a in related:
# special case for platform model, because we don't care about the top-level platform's model
# so it will blow up if we don't ignore it. if we got an extra platform model, we'd have an
# extra platform anyway... so this special case is safe.
if a.st != RT.PlatformModel:
self.assertIn(a.s, self.care.values(), "%s %s not cared about" % (a.st, a.s))
if a.ot != RT.PlatformModel:
self.assertIn(a.o, self.care.values(), "%s %s not cared about" % (a.ot, a.o))
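# Distilled usage sketch of RelatedResourcesCrawler, mirroring the calls
# exercised in the test above (comments only; 'some_device_id' is a
# placeholder, not a real resource id):
#
#     r = RelatedResourcesCrawler()
#     get_related = r.generate_get_related_resources_fn(
#         self.RR,
#         resource_whitelist=[RT.InstrumentDevice, RT.InstrumentModel],
#         predicate_dictionary={PRED.hasModel: (True, True)})
#     associations = get_related(some_device_id, 1)  # crawl to depth 1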
| ooici/coi-services | ion/services/sa/test/test_find_related_resources.py | Python | bsd-2-clause | 13,097 |
# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division, print_function, unicode_literals
import re
from .utils import normalize_whitespace
class Paragraph(object):
"""Object representing one block of text in HTML."""
def __init__(self, path):
self.dom_path = path.dom
self.xpath = path.xpath
self.text_nodes = []
self.chars_count_in_links = 0
self.tags_count = 0
@property
def is_heading(self):
return bool(re.search(r"\bh\d\b", self.dom_path))
@property
def is_boilerplate(self):
return self.class_type != "good"
@property
def text(self):
text = "".join(self.text_nodes)
return normalize_whitespace(text.strip())
def __len__(self):
return len(self.text)
@property
def words_count(self):
return len(self.text.split())
def contains_text(self):
return bool(self.text_nodes)
def append_text(self, text):
text = normalize_whitespace(text)
self.text_nodes.append(text)
return text
def stopwords_count(self, stopwords):
count = 0
for word in self.text.split():
if word.lower() in stopwords:
count += 1
return count
def stopwords_density(self, stopwords):
words_count = self.words_count
if words_count == 0:
return 0
return self.stopwords_count(stopwords) / words_count
def links_density(self):
text_length = len(self.text)
if text_length == 0:
return 0
return self.chars_count_in_links / text_length
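if __name__ == "__main__":
    # Minimal smoke test, not part of the library: Paragraph only needs an
    # object exposing ``dom`` and ``xpath`` attributes, so a namedtuple stub
    # suffices. Because of the relative import above, run it as
    # ``python -m justext.paragraph`` with the package installed.
    from collections import namedtuple
    PathStub = namedtuple("PathStub", ["dom", "xpath"])
    p = Paragraph(PathStub(dom="body.p", xpath="/html/body/p[1]"))
    p.append_text("Hello world")
    assert p.contains_text()
    assert p.words_count == 2
    assert not p.is_heading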
| pombredanne/jusText | justext/paragraph.py | Python | bsd-2-clause | 1,660 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Video.torrentDone'
db.add_column('portal_video', 'torrentDone',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Video.torrentDone'
db.delete_column('portal_video', 'torrentDone')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'taggit.tag': {
'Meta': {'object_name': 'Tag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '100'})
},
'taggit.taggeditem': {
'Meta': {'object_name': 'TaggedItem'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_tagged_items'", 'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'taggit_taggeditem_items'", 'to': "orm['taggit.Tag']"})
},
'portal.channel': {
'Meta': {'object_name': 'Channel'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()'})
},
'portal.comment': {
'Meta': {'object_name': 'Comment'},
'comment': ('django.db.models.fields.TextField', [], {'max_length': '1000'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'timecode': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Video']"})
},
'portal.hotfolder': {
'Meta': {'object_name': 'Hotfolder'},
'activated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'autoPublish': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'channel': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Channel']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'defaultName': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'folderName': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.IntegerField', [], {'max_length': '1'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
'portal.video': {
'Meta': {'object_name': 'Video'},
'assemblyid': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'autoPublish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'channel': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['portal.Channel']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date': ('django.db.models.fields.DateField', [], {}),
'description': ('django.db.models.fields.TextField', [], {}),
'duration': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'encodingDone': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.IntegerField', [], {'max_length': '1'}),
'linkURL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'mp3Size': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'mp3URL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'mp4Size': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'mp4URL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'oggSize': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'oggURL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'originalFile': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'torrentDone': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'torrentURL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'videoThumbURL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'webmSize': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'webmURL': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['portal'] | LambdaCast/LambdaCast | portal/migrations/0013_auto__add_field_video_torrentDone.py | Python | bsd-2-clause | 10,677 |
import ast
import tokenize
import io
import collections
import itertools
def rewrite_imports_in_module(source, top_level_names, depth):
source_lines = source.splitlines(True)
line_endings = list(_find_line_endings(source))
def _find_line_ending(position):
for line_ending in line_endings:
if line_ending >= position:
return line_ending
raise Exception("Could not find line ending")
def _should_rewrite_import(name):
return name.split(".")[0] in top_level_names
def _generate_simple_import_replacement(node):
temp_index = itertools.count()
replacement = []
for name in node.names:
if _should_rewrite_import(name.name):
parts = name.name.split(".")
if name.asname is None:
for part_index, part in enumerate(parts):
if part_index == 0:
replacement.append("from ." + ("." * depth) + " import " + part)
else:
variable_name = "___vendorize__{0}".format(next(temp_index))
replacement.append(
"from ." + ("." * depth) + ".".join(parts[:part_index]) +
" import " + part +
" as " + variable_name)
replacement.append(".".join(parts[:part_index + 1]) + " = " + variable_name)
else:
replacement.append(
"from ." + ("." * depth) + ".".join(parts[:-1]) +
" import " + parts[-1] +
" as " + name.asname)
else:
statement = "import " + name.name
if name.asname is not None:
statement += " as " + name.asname
replacement.append(statement)
_, line_ending_col_offset = _find_line_ending((node.lineno, node.col_offset))
return _Replacement(
_Location(node.lineno, node.col_offset),
# TODO: handle multi-line statements
line_ending_col_offset - node.col_offset,
"\n".join(replacement))
def _generate_import_from_replacement(node):
line = source_lines[node.lineno - 1]
col_offset = node.col_offset
from_keyword = "from"
assert line[col_offset:col_offset + len(from_keyword)] == from_keyword
col_offset += len(from_keyword)
while line[col_offset].isspace():
col_offset += 1
return _Replacement(
_Location(node.lineno, col_offset),
0,
"." + ("." * depth))
replacements = []
class ImportVisitor(ast.NodeVisitor):
def visit_Import(self, node):
if any(_should_rewrite_import(name.name) for name in node.names):
replacements.append(_generate_simple_import_replacement(node))
def visit_ImportFrom(self, node):
if not node.level and _should_rewrite_import(node.module):
replacements.append(_generate_import_from_replacement(node))
python_ast = ast.parse(source)
ImportVisitor().visit(python_ast)
return _replace_strings(source, replacements)
def _find_line_endings(source):
token_stream = tokenize.generate_tokens(io.StringIO(source + "\n").readline)
for token_type, token_str, start, end, line in token_stream:
if token_type == tokenize.NEWLINE:
yield start
_Location = collections.namedtuple("_Location", ["lineno", "col_offset"])
_Replacement = collections.namedtuple("_Replacement", [
"location",
"length",
"value"
])
def _replace_strings(source, replacements):
lines = source.splitlines(True)
replacements = sorted(replacements, key=lambda replacement: replacement.location, reverse=True)
for replacement in replacements:
line_index = replacement.location.lineno - 1
col_offset = replacement.location.col_offset
lines[line_index] = _str_replace(lines[line_index], replacement.length, col_offset, replacement.value)
return "".join(lines)
def _str_replace(original, length, index, to_insert):
return original[:index] + to_insert + original[index + length:]
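if __name__ == "__main__":
    # Quick demonstration, not part of the module: rewrite absolute imports
    # of a vendored top-level package ('somepackage' is a made-up name) into
    # relative imports at depth 0.
    sample = "import somepackage\nfrom somepackage import thing\n"
    print(rewrite_imports_in_module(sample, {"somepackage"}, depth=0))
    # Prints:
    #   from . import somepackage
    #   from .somepackage import thing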
| mwilliamson/python-vendorize | vendorize/import_rewrite.py | Python | bsd-2-clause | 4,390 |
# Copyright 2008-2015 Luc Saffre
# License: BSD (see file COPYING for details)
"""Choicelists for `lino_xl.lib.finan`.
"""
from lino.api import dd, _
# class VoucherStates(dd.Workflow):
# """The list of possible states for a voucher."""
# @classmethod
# def get_editable_states(cls):
# return [o for o in cls.objects() if o.editable]
# add = VoucherStates.add_item
# add('10', _("Draft"), 'draft', editable=True)
# add('20', _("Registered"), 'registered', editable=False)
# @dd.receiver(dd.pre_analyze)
# def setup_workflow(sender=None, **kw):
# VoucherStates.registered.add_transition(
# _("Register"), states='draft', icon_name='accept')
# VoucherStates.draft.add_transition(
# _("Deregister"), states="registered", icon_name='pencil')
| khchine5/xl | lino_xl/lib/finan/choicelists.py | Python | bsd-2-clause | 792 |
# -*- coding: utf-8 -*-
from wtforms import Form, TextField, PasswordField, validators
class LoginForm(Form):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
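if __name__ == '__main__':
    # Illustrative smoke test, not part of the original module: with no
    # form data bound, both Required validators should fail.
    form = LoginForm()
    assert not form.validate()
    print(form.errors)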
| fretscha/casimodo | forms.py | Python | bsd-2-clause | 240 |
import os
import sys
import subprocess
import multiprocessing
def command(args):
(arg, output_folder) = args
path, name = os.path.split(arg)
output_prefix = path.replace('/','_')
name, ext = os.path.splitext(name)
all_points = os.path.join(output_folder, '%s-%s-all-points.mat' % (output_prefix, name))
scores = os.path.join(output_folder, '%s-%s-interpolated.mat' % (output_prefix, name))
command = 'python LaneClickTest.py %s %s --quiet; ' % (arg, all_points)
command += 'python interp_2d.py %s %s' % (all_points, scores)
return subprocess.call(command, shell=True)
def main(rootdir, output_folder):
fileList = []
for root, subfolders, files in os.walk(rootdir):
files = filter(lambda z: 'split_0' in z, files)
for f in files:
fileList.append(os.path.join(root,f))
    arglist = [ ]
    for f in fileList:
        path, fname = os.path.split(f)
        fname = fname[8:]  # strip the 8-character 'split_0_' prefix from the name
arglist.append((path + '/' + fname, output_folder))
#command(arglist[0])
pool = multiprocessing.Pool(processes=12)
print pool.map(command, arglist)
if __name__ == '__main__':
main(sys.argv[1], sys.argv[2])
| sameeptandon/sail-car-log | process/honda-label/LabelSingle.py | Python | bsd-2-clause | 1,210 |
import pytest
import osmium as o
def test_list_types():
ml = o.index.map_types()
assert isinstance(ml, list)
assert ml
@pytest.fixture
def table():
return o.index.create_map("flex_mem")
def test_set_get(table):
table.set(4, o.osm.Location(3.4, -5.6))
l = table.get(4)
assert l.lon == pytest.approx(3.4)
assert l.lat == pytest.approx(-5.6)
def test_get_unset(table):
with pytest.raises(KeyError):
table.get(56)
def test_set_negative(table):
with pytest.raises(TypeError):
table.set(-4, o.osm.Location(3.4, -5.6))
def test_used_memory(table):
table.set(4, o.osm.Location(3.4, -5.6))
assert table.used_memory() > 0
def test_clear(table):
table.set(593, o.osm.Location(0.35, 45.3))
table.get(593)
table.clear()
with pytest.raises(KeyError):
table.get(593)
| osmcode/pyosmium | test/test_index.py | Python | bsd-2-clause | 849 |
import os
from alembic import config
from alembic import command
import pytest
from discode_server import app as app_
from discode_server import db as dbapi
@pytest.fixture(scope='function')
def app():
os.environ['DISCODE_CONFIG'] = 'discode_server/config/test.py'
command.upgrade(config.Config('alembic.ini'), 'head')
app = app_.create_app()
yield app
command.downgrade(config.Config('alembic.ini'), 'base')
@pytest.fixture(scope='function')
def test_client(app):
return app.test_client
@pytest.fixture(scope='function')
def db(app, event_loop):
return event_loop.run_until_complete(
dbapi.create_engine(app.config.DATABASE, loop=event_loop))
| d0ugal/discode-server | discode_server/tests/conftest.py | Python | bsd-2-clause | 688 |
"""
WSGI config for etd_drop project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "etd_drop.settings")
# Attempt to set environment variables from DOTENV, if we've been
# provided a path.
DOTENV_path = os.environ.get('DOTENV', None)
if DOTENV_path is not None:
import dotenv
dotenv.read_dotenv(DOTENV_path)
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
| MetaArchive/etd-drop | etd_drop/wsgi.py | Python | bsd-3-clause | 603 |
MaximalStaticDimension = 6
BuildFast = False
if BuildFast:
MaximalStaticDimension = 1
dims = ['Eigen::Dynamic']
dimTags = ['D']
for i in range(1, MaximalStaticDimension + 1):
dims.append(str(i))
dimTags.append(str(i))
#types = ['char','short','int','long','unsigned char', 'unsigned short', 'unsigned int', 'unsigned long', 'float', 'double', 'std::complex<float>','std::complex<double>']
#typeTags = ['char','short','int','long','uchar', 'ushort', 'uint', 'ulong', 'float', 'double', 'cfloat','cdouble']
types = ['int', 'float', 'double', 'boost::uint8_t', 'boost::int64_t']
typeTags = ['int', 'float', 'double', 'uchar', 'long']
if BuildFast:
types = ['double']
typeTags = ['double']
| ethz-asl/Schweizer-Messer | numpy_eigen/src/generator_config.py | Python | bsd-3-clause | 714 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import messages
from django.http import Http404
from django.utils.translation import ugettext_lazy as _
from django.views.generic import DetailView, FormView, TemplateView
from dynamic_forms.actions import action_registry
from dynamic_forms.forms import FormModelForm
from dynamic_forms.models import FormModelData
from dynamic_forms.utils import is_old_style_action
class DynamicFormView(FormView):
form_class = FormModelForm
def dispatch(self, request, *args, **kwargs):
self.form_model = self.kwargs.pop('model')
return super(DynamicFormView, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
context = super(DynamicFormView, self).get_context_data(**kwargs)
context.update({
'model': self.form_model,
'name': self.form_model.name,
'submit_url': self.form_model.submit_url,
})
return context
def get_form_kwargs(self):
kwargs = super(DynamicFormView, self).get_form_kwargs()
kwargs['model'] = self.form_model
return kwargs
def get_success_url(self):
"""
If the ``dynamic_form_store_database`` action is active for the current
form, include the ``display_key`` for the newly created data set.
"""
url = self.form_model.success_url
if self.form_model.allow_display:
store_key = 'dynamic_forms.actions.dynamic_form_store_database'
data = self.action_results.get(store_key)
if data is not None:
url += '?display_key=%s' % data.display_key
return url
def get_template_names(self):
return self.form_model.form_template
def form_valid(self, form):
"""
Instantiates an empty dict ``self.action_results`` that takes the
return values of every action that is called, unless the return value
of that action is ``None``.
"""
self.action_results = {}
for actionkey in self.form_model.actions:
action = action_registry.get(actionkey)
if action is None:
continue
args = (self.form_model, form)
if not is_old_style_action(action):
args = args + (self.request,)
self.action_results[actionkey] = action(*args)
messages.success(self.request,
_('Thank you for submitting this form.'))
return super(DynamicFormView, self).form_valid(form)
def form_invalid(self, form):
messages.error(self.request,
                       _('An error occurred while submitting this form.'))
return super(DynamicFormView, self).form_invalid(form)
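# Illustrative sketch of the two action signatures dispatched by form_valid()
# above (hypothetical action bodies; only the argument shapes come from this
# module):
#
#     def old_style_action(form_model, form):
#         ...  # called without the request
#
#     def new_style_action(form_model, form, request):
#         ...  # is_old_style_action() is False, so the request is appended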
class DynamicTemplateView(TemplateView):
def dispatch(self, request, *args, **kwargs):
self.form_model = self.kwargs.pop('model')
return super(DynamicTemplateView, self).dispatch(request, *args, **kwargs)
def get_context_data(self, **kwargs):
"""
If a ``display_key`` query parameter is given and the key belongs to a
FormModel that has ``allow_display=True``, add the data to the normal
Django `TemplateView` context.
"""
context = super(DynamicTemplateView, self).get_context_data(**kwargs)
try:
display_key = self.request.GET.get('display_key')
data = FormModelData.objects.get(display_key=display_key)
context.update({
'data': data,
})
except (FormModelData.DoesNotExist, FormModelData.MultipleObjectsReturned):
pass
return context
def get_template_names(self):
return self.form_model.success_template
class DynamicDataMixin(object):
slug_field = 'display_key'
slug_url_kwarg = 'display_key'
template_name_404 = 'dynamic_forms/data_set_404.html'
def get(self, request, *args, **kwargs):
try:
return super(DynamicDataMixin, self).get(request, *args, **kwargs)
except Http404:
return self.render_404({})
def get_template_names_404(self):
return [self.template_name_404]
def render_404(self, context=None, **response_kwargs):
ctx = {
'display_key': self.kwargs.get(self.slug_url_kwarg)
}
if context:
ctx.update(context)
return self.response_class(request=self.request,
template=self.get_template_names_404(), context=ctx, status=404,
**response_kwargs)
class DynamicDataSetDetailView(DynamicDataMixin, DetailView):
model = FormModelData
template_name = 'dynamic_forms/data_set.html'
data_set_detail = DynamicDataSetDetailView.as_view()
| uhuramedia/django-dynamic-forms | dynamic_forms/views.py | Python | bsd-3-clause | 4,771 |
from .oll import oll
VERSION = (0, 2, 1)
__version__ = "0.2.1"
__all__ = ["oll"]
| ikegami-yukino/oll-python | oll/__init__.py | Python | bsd-3-clause | 82 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('titles', '0002_auto_20141013_2232'),
('character', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Role',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('role', models.CharField(default=b'Actor', max_length=20, choices=[(b'Director', b'Director'), (b'Writer', b'Writer'), (b'Producer', b'Producer'), (b'Music', b'Music'), (b'Cinematography', b'Cinematography'), (b'Crew', b'Crew'), (b'Actor', b'Actor')])),
('desc', models.TextField(blank=True)),
('url', models.URLField(blank=True)),
('imdb', models.URLField(blank=True)),
('character', models.ForeignKey(to='character.Character', null=True)),
('title', models.ForeignKey(to='titles.Adaptation')),
],
options={
},
bases=(models.Model,),
),
]
| vivyly/fancastic_17 | fancastic_17/role/migrations/0001_initial.py | Python | bsd-3-clause | 1,171 |
# -*- coding: utf-8 -*-
import wx
from datetime import datetime
import db
_sMedFont = None
def MedFont():
global _sMedFont
if not _sMedFont:
_sMedFont = wx.Font(10, wx.FONTFAMILY_DEFAULT, wx.NORMAL, wx.NORMAL)
return _sMedFont
_sBigFont = None
def BigFont():
global _sBigFont
if not _sBigFont:
_sBigFont = wx.Font(11, wx.FONTFAMILY_DEFAULT, wx.NORMAL, wx.NORMAL)
return _sBigFont
_sHugeFont = None
def HugeFont():
global _sHugeFont
if not _sHugeFont:
_sHugeFont = wx.Font(16, wx.FONTFAMILY_DEFAULT, wx.NORMAL, wx.NORMAL)
return _sHugeFont
def GetShoptimeTypeDescription(type):
if type in db.shoptimeChoices:
return db.shoptimeChoices[type]
else:
return "\"{0}\"".format(type)
def MakeInfoEntrySizer():
sizer = wx.FlexGridSizer(0, 2)
sizer.AddGrowableCol(1)
return sizer
def MakeStaticBoxSizer(parent, label = "", style = wx.HORIZONTAL):
staticBox = wx.StaticBox(parent, label = label)
staticBox.SetFont(MedFont())
return wx.StaticBoxSizer(staticBox, style)
def AddField(parent, sizer, font, label, entryKind = wx.TextCtrl, style = 0):
text = wx.StaticText(parent, wx.ID_ANY, label)
if font:
text.SetFont(font)
sizer.Add(text, 0, wx.ALIGN_CENTER_VERTICAL | wx.ALIGN_RIGHT | wx.ALL, 5)
field = entryKind(parent, wx.ID_ANY, style = style)
sizer.Add(field, 0, wx.EXPAND)
return field
def AddLabel(parent, sizer, font, string, flags = 0, type = wx.StaticText):
label = type(parent, wx.ID_ANY, string)
label.SetFont(font)
sizer.Add(label, 0, flags | wx.ALIGN_CENTER_VERTICAL | wx.ALL, 5)
return label
def FormatTimedelta(timedelta):
hours = timedelta.days * 24
hours += timedelta.seconds / (3600)
minutes = (timedelta.seconds % 3600) / 60
return "{0}h {1}m".format(hours, minutes)
def DatetimeWxToPy(wxdt):
return datetime(
wxdt.GetYear(),
wxdt.GetMonth() + 1,
wxdt.GetDay(),
wxdt.GetHour(),
wxdt.GetMinute(),
wxdt.GetSecond(),
wxdt.GetMillisecond() * 1000)
def DatetimePyToWx(pydt):
wxdt = wx.DateTime().Now()
if hasattr(pydt, "year"):
wxdt.SetYear(pydt.year)
wxdt.SetMonth(pydt.month - 1)
wxdt.SetDay(pydt.day)
if hasattr(pydt, "hour"):
wxdt.SetHour(pydt.hour)
wxdt.SetMinute(pydt.minute)
wxdt.SetSecond(pydt.second)
wxdt.SetMillisecond(pydt.microsecond / 1000)
return wxdt
def GetTextSize(text, font, parent):
dc = wx.ClientDC(parent);
dc.SetFont(font)
return dc.GetTextExtent(text)
class Delegator(object):
def __init__(self):
self._delegates = []
def AppendDelegate(self, delegate):
self._delegates.append(delegate)
def PushDelegate(self, delegate):
self._delegates.insert(0, delegate)
def RemoveDelegate(self, delegate):
try:
self._delegates.remove(delegate)
except ValueError:
pass
def __getattr__(self, attr):
if attr in self.__dict__:
return self.__dict__[attr]
for delegate in self._delegates:
if hasattr(delegate, attr):
return getattr(delegate, attr)
raise AttributeError("No attribute \'{0}\' in {1} or delegates".format(
attr, self.__class__.__name__))
class WindowSizes:
pass
winSizes = WindowSizes()
winSizes.mainWindow = (1000, 550)
winSizes.authenticateMechanic = (300, 140)
winSizes.newPerson = (350, 450)
winSizes.viewPerson = (950, 470)
winSizes.shoptimeDialog = (300, 200)
winSizes.bikeDialog = (330, 200)
winSizes.feedbackDialog = (340, 160)
winSizes.mechanicToolbox = (250, 150)
winSizes.findBike = (450, 370)
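if __name__ == "__main__":
    # Tiny self-check of Delegator, illustrative only (no UI is created):
    # attribute lookups that miss on the Delegator itself fall through to
    # the first delegate that can answer them.
    class _Greeter(object):
        def Hello(self):
            return "hello"
    d = Delegator()
    d.AppendDelegate(_Greeter())
    assert d.Hello() == "hello"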
| arthurljones/bikechurch-signin-python | src/ui.py | Python | bsd-3-clause | 3,409 |
import logging
import numpy as np
from golem import DataSet
from golem.nodes import FeatMap, BaseNode
from ..utils import spectrogram, sliding_window
class TFC(BaseNode):
def __init__(self, nfft, win_step):
def tfc(x):
return np.dstack([spectrogram(x[:, ci], nfft, win_step)
for ci in range(x.shape[1])])
BaseNode.__init__(self)
self.nfft, self.win_step = nfft, win_step
self.n = FeatMap(tfc)
def apply_(self, d):
assert len(d.feat_shape) == 2 # [frames x channels]
if d.feat_dim_lab != None:
assert d.feat_dim_lab[0] == 'time'
tfc = self.n.apply(d)
feat_dim_lab = ['time', 'frequency', d.feat_dim_lab[1]]
if d.feat_nd_lab != None:
old_time = np.asarray([float(i) for i in d.feat_nd_lab[0]])
time = np.mean(sliding_window(old_time, self.nfft, self.win_step), axis=1)
time = ['%.1f' % i for i in time]
      dt = (np.max(old_time) - np.min(old_time)) / old_time.size  # average sampling interval
freqs = np.fft.fftfreq(self.nfft, dt)
freqs = ['%d' % abs(i) for i in freqs[:self.nfft/2 + 1]]
channels = d.feat_nd_lab[1]
feat_nd_lab = [time, freqs, channels]
else:
feat_nd_lab = None
return DataSet(feat_dim_lab=feat_dim_lab, feat_nd_lab=feat_nd_lab,
default=tfc)
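# Illustrative usage sketch (assumes the golem convention that BaseNode
# exposes apply(), delegating to apply_ above; ``d`` would be a DataSet with
# [frames x channels] features and feat_dim_lab == ['time', 'channels']):
#
#     node = TFC(nfft=256, win_step=128)
#     d_tfc = node.apply(d)  # features become [time x frequency x channels]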
| breuderink/psychic | psychic/nodes/timefreq.py | Python | bsd-3-clause | 1,303 |
import json
import logging
import os
import socket
import StringIO
from time import time
import django
from django.conf import settings
from django.contrib.sites.models import Site
from django.http import (HttpResponsePermanentRedirect, HttpResponseRedirect,
HttpResponse, Http404)
from django.shortcuts import render
from django.views.decorators.cache import never_cache
from django.views.decorators.clickjacking import xframe_options_exempt
from django.views.decorators.http import require_GET
import django_qunit.views
from celery.messaging import establish_connection
from mobility.decorators import mobile_template
from PIL import Image
from session_csrf import anonymous_csrf
from kitsune.sumo.decorators import cors_enabled
from kitsune.search import es_utils
from kitsune.sumo.redis_utils import redis_client, RedisError
from kitsune.sumo.urlresolvers import reverse
from kitsune.sumo.utils import get_next_url, rabbitmq_queue_size
from kitsune.users.forms import AuthenticationForm
log = logging.getLogger('k.services')
@never_cache
@mobile_template('sumo/{mobile/}locales.html')
def locales(request, template):
"""The locale switcher page."""
return render(request, template, dict(
next_url=get_next_url(request) or reverse('home')))
@anonymous_csrf
def handle403(request):
"""A 403 message that looks nicer than the normal Apache forbidden page"""
no_cookies = False
referer = request.META.get('HTTP_REFERER')
if referer:
no_cookies = (referer.endswith(reverse('users.login'))
or referer.endswith(reverse('users.register')))
return render(request, 'handlers/403.html', {
'form': AuthenticationForm(),
'no_cookies': no_cookies},
status=403)
def handle404(request):
"""A handler for 404s"""
return render(request, 'handlers/404.html', status=404)
def handle500(request):
"""A 500 message that looks nicer than the normal Apache error page"""
return render(request, 'handlers/500.html', status=500)
def redirect_to(request, url, permanent=True, **kwargs):
"""Like Django's redirect_to except that 'url' is passed to reverse."""
dest = reverse(url, kwargs=kwargs)
if permanent:
return HttpResponsePermanentRedirect(dest)
return HttpResponseRedirect(dest)
def deprecated_redirect(request, url, **kwargs):
"""Redirect with an interstitial page telling folks to update their
bookmarks.
"""
dest = reverse(url, kwargs=kwargs)
proto = 'https://' if request.is_secure() else 'http://'
host = Site.objects.get_current().domain
return render(request, 'sumo/deprecated.html', {
'dest': dest, 'proto': proto, 'host': host})
def robots(request):
"""Generate a robots.txt."""
if not settings.ENGAGE_ROBOTS:
template = 'User-Agent: *\nDisallow: /'
else:
template = render(request, 'sumo/robots.html')
return HttpResponse(template, mimetype='text/plain')
def test_memcached(host, port):
"""Connect to memcached.
:returns: True if test passed, False if test failed.
"""
    # Create the socket outside the try block so the finally clause can
    # always close it, even if construction fails before connect().
    s = socket.socket()
    try:
        s.connect((host, port))
        return True
    except Exception as exc:
        log.critical('Failed to connect to memcached (%r): %s' %
                     ((host, port), exc))
        return False
    finally:
        s.close()
ERROR = 'ERROR'
INFO = 'INFO'
@never_cache
def monitor(request):
"""View for services monitor."""
status = {}
# Note: To add a new component to the services monitor, do your
# testing and then add a name -> list of output tuples map to
# status.
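    # For example, a hypothetical "disk" component would follow the same
    # INFO/ERROR tuple convention used by the checks below (sketch only;
    # check_disk_space is not a real helper in this codebase):
    #
    #     disk_results = []
    #     if check_disk_space(settings.MEDIA_ROOT):
    #         disk_results.append((INFO, 'Disk space looks good.'))
    #     else:
    #         disk_results.append((ERROR, 'Disk space is low!'))
    #     status['disk'] = disk_results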
# Check memcached.
memcache_results = []
try:
for cache_name, cache_props in settings.CACHES.items():
result = True
backend = cache_props['BACKEND']
location = cache_props['LOCATION']
# LOCATION can be a string or a list of strings
if isinstance(location, basestring):
location = location.split(';')
if 'memcache' in backend:
for loc in location:
# TODO: this doesn't handle unix: variant
ip, port = loc.split(':')
result = test_memcached(ip, int(port))
memcache_results.append(
(INFO, '%s:%s %s' % (ip, port, result)))
if not memcache_results:
memcache_results.append((ERROR, 'memcache is not configured.'))
elif len(memcache_results) < 2:
memcache_results.append(
(ERROR, ('You should have at least 2 memcache servers. '
'You have %s.' % len(memcache_results))))
else:
memcache_results.append((INFO, 'memcached servers look good.'))
except Exception as exc:
memcache_results.append(
(ERROR, 'Exception while looking at memcached: %s' % str(exc)))
status['memcached'] = memcache_results
# Check Libraries and versions
libraries_results = []
try:
Image.new('RGB', (16, 16)).save(StringIO.StringIO(), 'JPEG')
libraries_results.append((INFO, 'PIL+JPEG: Got it!'))
except Exception as exc:
libraries_results.append(
(ERROR,
'PIL+JPEG: Probably missing: '
'Failed to create a jpeg image: %s' % exc))
status['libraries'] = libraries_results
# Check file paths.
msg = 'We want read + write.'
filepaths = (
(settings.USER_AVATAR_PATH, os.R_OK | os.W_OK, msg),
(settings.IMAGE_UPLOAD_PATH, os.R_OK | os.W_OK, msg),
(settings.THUMBNAIL_UPLOAD_PATH, os.R_OK | os.W_OK, msg),
(settings.GALLERY_IMAGE_PATH, os.R_OK | os.W_OK, msg),
(settings.GALLERY_IMAGE_THUMBNAIL_PATH, os.R_OK | os.W_OK, msg),
(settings.GALLERY_VIDEO_PATH, os.R_OK | os.W_OK, msg),
(settings.GALLERY_VIDEO_THUMBNAIL_PATH, os.R_OK | os.W_OK, msg),
(settings.GROUP_AVATAR_PATH, os.R_OK | os.W_OK, msg),
)
filepath_results = []
for path, perms, notes in filepaths:
path = os.path.join(settings.MEDIA_ROOT, path)
path_exists = os.path.isdir(path)
path_perms = os.access(path, perms)
        if path_exists and path_perms:
            filepath_results.append(
                (INFO, '%s: %s %s %s' % (path, path_exists, path_perms,
                                         notes)))
        else:
            filepath_results.append(
                (ERROR, '%s: exists=%s, perms ok=%s. %s' % (path, path_exists,
                                                            path_perms, notes)))
status['filepaths'] = filepath_results
# Check RabbitMQ.
rabbitmq_results = []
try:
rabbit_conn = establish_connection(connect_timeout=2)
rabbit_conn.connect()
rabbitmq_results.append(
(INFO, 'Successfully connected to RabbitMQ.'))
rabbitmq_results.append(
(INFO, 'Queue size: %s' % rabbitmq_queue_size()))
except (socket.error, IOError) as exc:
rabbitmq_results.append(
(ERROR, 'Error connecting to RabbitMQ: %s' % str(exc)))
except Exception as exc:
rabbitmq_results.append(
(ERROR, 'Exception while looking at RabbitMQ: %s' % str(exc)))
status['RabbitMQ'] = rabbitmq_results
# Check ES.
es_results = []
try:
es_utils.get_doctype_stats(es_utils.all_read_indexes()[0])
es_results.append(
(INFO, ('Successfully connected to ElasticSearch and index '
'exists.')))
except es_utils.ES_EXCEPTIONS as exc:
es_results.append(
(ERROR, 'ElasticSearch problem: %s' % str(exc)))
except Exception as exc:
es_results.append(
(ERROR, 'Exception while looking at ElasticSearch: %s' % str(exc)))
status['ElasticSearch'] = es_results
# Check Celery.
# start = time.time()
# pong = celery.task.ping()
# rabbit_results = r = {'duration': time.time() - start}
# status_summary['rabbit'] = pong == 'pong' and r['duration'] < 1
# Check Redis.
redis_results = []
if hasattr(settings, 'REDIS_BACKENDS'):
for backend in settings.REDIS_BACKENDS:
try:
redis_client(backend)
redis_results.append((INFO, '%s: Pass!' % backend))
except RedisError:
redis_results.append((ERROR, '%s: Fail!' % backend))
status['Redis'] = redis_results
status_code = 200
status_summary = {}
for component, output in status.items():
if ERROR in [item[0] for item in output]:
status_code = 500
status_summary[component] = False
else:
status_summary[component] = True
return render(request, 'services/monitor.html', {
'component_status': status,
'status_summary': status_summary},
status=status_code)
@never_cache
def error(request):
if not getattr(settings, 'STAGE', False):
raise Http404
# Do something stupid.
fu # noqa
@require_GET
@never_cache
def version_check(request):
mime = 'application/x-json'
token = settings.VERSION_CHECK_TOKEN
    if (token is None or 'token' not in request.GET or
            token != request.GET['token']):
return HttpResponse(status=403, mimetype=mime)
versions = {
'django': '.'.join(map(str, django.VERSION)),
}
return HttpResponse(json.dumps(versions), mimetype=mime)
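# Example request against version_check above (sketch; the exact path depends
# on the project's urlconf, and the token must match
# settings.VERSION_CHECK_TOKEN):
#
#     curl 'https://example.com/<version-check-path>?token=<TOKEN>'
#     -> {"django": "<x.y.z>"}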
# Allows another site to embed the QUnit suite
# in an iframe (for CI).
@xframe_options_exempt
def kitsune_qunit(request, path):
"""View that hosts QUnit tests."""
ctx = django_qunit.views.get_suite_context(request, path)
ctx.update(timestamp=time())
return render(request, 'tests/qunit.html', ctx)
@cors_enabled('*')
def serve_cors(*args, **kwargs):
"""A wrapper around django.views.static.serve that adds CORS headers."""
if not settings.DEBUG:
raise RuntimeError("Don't use kitsune.sumo.views.serve_cors "
"in production.")
from django.views.static import serve
return serve(*args, **kwargs)
| dbbhattacharya/kitsune | kitsune/sumo/views.py | Python | bsd-3-clause | 10,010 |
import unittest
from testlib import testutil, PygrTestProgram
import ConfigParser, sys, os, string
from pygr.mapping import Collection
import pygr.Data
try:
import hashlib
except ImportError:
import md5 as hashlib
config = ConfigParser.ConfigParser({'testOutputBaseDir' : '.', 'smallSampleKey': ''})
config.read([ os.path.join(os.path.expanduser('~'), '.pygrrc'), os.path.join(os.path.expanduser('~'), 'pygr.cfg'), '.pygrrc', 'pygr.cfg' ])
msaDir = config.get('megatests_dm2', 'msaDir')
seqDir = config.get('megatests_dm2', 'seqDir')
smallSampleKey = config.get('megatests_dm2', 'smallSampleKey')
testInputDB = config.get('megatests', 'testInputDB')
testInputDir = config.get('megatests', 'testInputDir')
testOutputBaseDir = config.get('megatests', 'testOutputBaseDir')
if smallSampleKey:
smallSamplePostfix = '_' + smallSampleKey
else:
smallSamplePostfix = ''
## msaDir CONTAINS PRE-BUILT NLMSA
## seqDir CONTAINS GENOME ASSEMBLIES AND THEIR SEQDB FILES
## TEST INPUT/OUPTUT FOR COMPARISON, THESE FILES SHOULD BE IN THIS DIRECTORY
## exonAnnotFileName = 'Annotation_ConservedElement_Exons_dm2.txt'
## intronAnnotFileName = 'Annotation_ConservedElement_Introns_dm2.txt'
## stopAnnotFileName = 'Annotation_ConservedElement_Stop_dm2.txt'
## testDir = os.path.join(testOutputBaseDir, 'TEST_' + ''.join(tmpList)) SHOULD BE DELETED IF YOU WANT TO RUN IN '.'
# DIRECTIONARY FOR DOC STRING OF SEQDB
docStringDict = {
'anoGam1':'A. gambiae Genome (February 2003)',
'apiMel2':'A. mellifera Genome (January 2005)',
'dm2':'D. melanogaster Genome (April 2004)',
'dp4':'D. pseudoobscura Genome (February 2006)',
'droAna3':'D. ananassae Genome (February 2006)',
'droEre2':'D. erecta Genome (February 2006)',
'droGri2':'D. grimshawi Genome (February 2006)',
'droMoj3':'D. mojavensis Genome (February 2006)',
'droPer1':'D. persimilis Genome (October 2005)',
'droSec1':'D. sechellia Genome (October 2005)',
'droSim1':'D. simulans Genome (April 2005)',
'droVir3':'D. virilis Genome (February 2006)',
'droWil1':'D. willistoni Genome (February 2006)',
'droYak2':'D. yakuba Genome (November 2005)',
'triCas2':'T. castaneum Genome (September 2005)',
}
# GENOME ASSEMBLY LIST FOR DM2 MULTIZ15WAY
msaSpeciesList = ['anoGam1', 'apiMel2', 'dm2', 'dp4', 'droAna3', 'droEre2', 'droGri2', 'droMoj3', \
'droPer1', 'droSec1', 'droSim1', 'droVir3', 'droWil1', 'droYak2', 'triCas2']
class PygrBuildNLMSAMegabase(unittest.TestCase):
'restrict megatest to an initially empty directory, need large space to perform'
def setUp(self, testDir = None):
import random
tmpList = [c for c in 'PygrBuildNLMSAMegabase']
random.shuffle(tmpList)
testDir = os.path.join(testOutputBaseDir, 'TEST_' + ''.join(tmpList)) # FOR TEST, SHOULD BE DELETED
if testDir is None: testDir = 'TEST_' + ''.join(tmpList) # NOT SPECIFIED, USE CURRENT DIRECTORY
try:
os.mkdir(testDir)
testDir = os.path.realpath(testDir)
except:
raise IOError
self.path = testDir
try:
tmpFileName = os.path.join(testDir, 'DELETE_THIS_TEMP_FILE')
open(tmpFileName, 'w').write('A'*1024*1024) # WRITE 1MB FILE FOR TESTING
except:
raise IOError
pygr.Data.update(self.path)
from pygr import seqdb
for orgstr in msaSpeciesList:
genome = seqdb.BlastDB(os.path.join(seqDir, orgstr))
genome.__doc__ = docStringDict[orgstr]
pygr.Data.addResource('TEST.Seq.Genome.' + orgstr, genome)
pygr.Data.save()
def copyFile(self, filename): # COPY A FILE INTO TEST DIRECTORY
newname = os.path.join(self.path, os.path.basename(filename))
open(newname, 'w').write(open(filename, 'r').read())
return newname
def tearDown(self):
'delete the temporary directory and files, restore pygr.Data path'
        for dirpath, subdirs, files in os.walk(self.path, topdown = False): # MUST DELETE BOTTOM-UP: FILES FIRST, THEN THEIR DIRECTORIES
# THIS PART MAY NOT WORK IN NFS MOUNTED DIRECTORY DUE TO .nfsXXXXXXXXX CREATION
# IN NFS MOUNTED DIRECTORY, IT CANNOT BE DELETED UNTIL CLOSING PYGRDATA
for filename in files:
os.remove(os.path.join(dirpath, filename))
os.rmdir(dirpath)
# Restore original pygr.Data path to remedy lack of isolation
# between tests from the same run
pygr.Data.update(None)
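# The tests below repeat one idiom; distilled here as a comment-only sketch
# (file names and sliceAttrDict contents are placeholders):
#
#     slices = Collection(filename='...cdb', intKeys=True, mode='cr')
#     annodb = seqdb.AnnotationDB(slices, genome, sliceAttrDict=dict(...))
#     nlmsa = cnestedlist.NLMSA('...', 'w', pairwiseMode=True,
#                               bidirectional=False)
#     ...fill slices, then for each annotation id...
#     nlmsa.addAnnotation(annodb[some_id])
#     slices.close()  # flush the shelve
#     nlmsa.build()   # finalize the alignment indexes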
class Build_Test(PygrBuildNLMSAMegabase):
def test_seqdb(self):
'Check pygr.Data contents'
l = pygr.Data.dir('TEST')
preList = ['TEST.Seq.Genome.' + orgstr for orgstr in msaSpeciesList]
assert l == preList
def test_collectionannot(self):
'Test building an AnnotationDB from file'
from pygr import seqdb, cnestedlist, sqlgraph
dm2 = pygr.Data.getResource('TEST.Seq.Genome.dm2')
# BUILD ANNOTATION DATABASE FOR REFSEQ EXONS
exon_slices = Collection(filename = os.path.join(self.path, 'refGene_exonAnnot_dm2.cdb'), \
intKeys = True, mode = 'cr', writeback = False)
exon_db = seqdb.AnnotationDB(exon_slices, dm2,
sliceAttrDict = dict(id = 0, exon_id = 1, orientation = 2,
gene_id = 3, start = 4, stop = 5))
msa = cnestedlist.NLMSA(os.path.join(self.path, 'refGene_exonAnnot_dm2'), 'w', \
pairwiseMode = True, bidirectional = False)
for lines in open(os.path.join(testInputDir, 'refGene_exonAnnot%s_dm2.txt' % smallSamplePostfix), 'r').xreadlines():
row = [x for x in lines.split('\t')] # CONVERT TO LIST SO MUTABLE
row[1] = int(row[1]) # CONVERT FROM STRING TO INTEGER
exon_slices[row[1]] = row
exon = exon_db[row[1]] # GET THE ANNOTATION OBJECT FOR THIS EXON
msa.addAnnotation(exon) # SAVE IT TO GENOME MAPPING
exon_db.clear_cache() # not really necessary; cache should autoGC
exon_slices.close() # SHELVE SHOULD BE EXPLICITLY CLOSED IN ORDER TO SAVE CURRENT CONTENTS
msa.build() # FINALIZE GENOME ALIGNMENT INDEXES
exon_db.__doc__ = 'Exon Annotation Database for dm2'
pygr.Data.addResource('TEST.Annotation.dm2.exons', exon_db)
msa.__doc__ = 'NLMSA Exon for dm2'
pygr.Data.addResource('TEST.Annotation.NLMSA.dm2.exons', msa)
exon_schema = pygr.Data.ManyToManyRelation(dm2, exon_db, bindAttrs = ('exon1',))
exon_schema.__doc__ = 'Exon Schema for dm2'
pygr.Data.addSchema('TEST.Annotation.NLMSA.dm2.exons', exon_schema)
# BUILD ANNOTATION DATABASE FOR REFSEQ SPLICES
splice_slices = Collection(filename = os.path.join(self.path, 'refGene_spliceAnnot_dm2.cdb'), \
intKeys = True, mode = 'cr', writeback = False)
splice_db = seqdb.AnnotationDB(splice_slices, dm2,
sliceAttrDict = dict(id = 0, splice_id = 1, orientation = 2,
gene_id = 3, start = 4, stop = 5))
msa = cnestedlist.NLMSA(os.path.join(self.path, 'refGene_spliceAnnot_dm2'), 'w', \
pairwiseMode = True, bidirectional = False)
for lines in open(os.path.join(testInputDir, 'refGene_spliceAnnot%s_dm2.txt' % smallSamplePostfix), 'r').xreadlines():
row = [x for x in lines.split('\t')] # CONVERT TO LIST SO MUTABLE
row[1] = int(row[1]) # CONVERT FROM STRING TO INTEGER
splice_slices[row[1]] = row
splice = splice_db[row[1]] # GET THE ANNOTATION OBJECT FOR THIS EXON
msa.addAnnotation(splice) # SAVE IT TO GENOME MAPPING
splice_db.clear_cache() # not really necessary; cache should autoGC
splice_slices.close() # SHELVE SHOULD BE EXPLICITLY CLOSED IN ORDER TO SAVE CURRENT CONTENTS
msa.build() # FINALIZE GENOME ALIGNMENT INDEXES
splice_db.__doc__ = 'Splice Annotation Database for dm2'
pygr.Data.addResource('TEST.Annotation.dm2.splices', splice_db)
msa.__doc__ = 'NLMSA Splice for dm2'
pygr.Data.addResource('TEST.Annotation.NLMSA.dm2.splices', msa)
splice_schema = pygr.Data.ManyToManyRelation(dm2, splice_db, bindAttrs = ('splice1',))
splice_schema.__doc__ = 'Splice Schema for dm2'
pygr.Data.addSchema('TEST.Annotation.NLMSA.dm2.splices', splice_schema)
# BUILD ANNOTATION DATABASE FOR MOST CONSERVED ELEMENTS FROM UCSC
ucsc_slices = Collection(filename = os.path.join(self.path, 'phastConsElements15way_dm2.cdb'), \
intKeys = True, mode = 'cr', writeback = False)
ucsc_db = seqdb.AnnotationDB(ucsc_slices, dm2,
sliceAttrDict = dict(id = 0, ucsc_id = 1, orientation = 2,
gene_id = 3, start = 4, stop = 5))
msa = cnestedlist.NLMSA(os.path.join(self.path, 'phastConsElements15way_dm2'), 'w', \
pairwiseMode = True, bidirectional = False)
for lines in open(os.path.join(testInputDir, 'phastConsElements15way%s_dm2.txt' % smallSamplePostfix), 'r').xreadlines():
row = [x for x in lines.split('\t')] # CONVERT TO LIST SO MUTABLE
row[1] = int(row[1]) # CONVERT FROM STRING TO INTEGER
ucsc_slices[row[1]] = row
ucsc = ucsc_db[row[1]] # GET THE ANNOTATION OBJECT FOR THIS EXON
msa.addAnnotation(ucsc) # SAVE IT TO GENOME MAPPING
ucsc_db.clear_cache() # not really necessary; cache should autoGC
ucsc_slices.close() # SHELVE SHOULD BE EXPLICITLY CLOSED IN ORDER TO SAVE CURRENT CONTENTS
msa.build() # FINALIZE GENOME ALIGNMENT INDEXES
ucsc_db.__doc__ = 'Most Conserved Elements for dm2'
pygr.Data.addResource('TEST.Annotation.UCSC.dm2.mostconserved', ucsc_db)
msa.__doc__ = 'NLMSA for Most Conserved Elements for dm2'
pygr.Data.addResource('TEST.Annotation.UCSC.NLMSA.dm2.mostconserved', msa)
ucsc_schema = pygr.Data.ManyToManyRelation(dm2, ucsc_db, bindAttrs = ('element1',))
ucsc_schema.__doc__ = 'Schema for UCSC Most Conserved Elements for dm2'
pygr.Data.addSchema('TEST.Annotation.UCSC.NLMSA.dm2.mostconserved', ucsc_schema)
pygr.Data.save()
pygr.Data.clear_cache() # force resources to reload when requested
# QUERY TO EXON AND SPLICES ANNOTATION DATABASE
dm2 = pygr.Data.getResource('TEST.Seq.Genome.dm2')
exonmsa = pygr.Data.getResource('TEST.Annotation.NLMSA.dm2.exons')
splicemsa = pygr.Data.getResource('TEST.Annotation.NLMSA.dm2.splices')
conservedmsa = pygr.Data.getResource('TEST.Annotation.UCSC.NLMSA.dm2.mostconserved')
exons = pygr.Data.getResource('TEST.Annotation.dm2.exons')
splices = pygr.Data.getResource('TEST.Annotation.dm2.splices')
mostconserved = pygr.Data.getResource('TEST.Annotation.UCSC.dm2.mostconserved')
# OPEN DM2_MULTIZ15WAY NLMSA
msa = cnestedlist.NLMSA(os.path.join(msaDir, 'dm2_multiz15way'), 'r', trypath = [seqDir])
exonAnnotFileName = os.path.join(testInputDir, 'Annotation_ConservedElement_Exons%s_dm2.txt' % smallSamplePostfix)
intronAnnotFileName = os.path.join(testInputDir, 'Annotation_ConservedElement_Introns%s_dm2.txt' % smallSamplePostfix)
newexonAnnotFileName = os.path.join(self.path, 'new_Exons_dm2.txt')
newintronAnnotFileName = os.path.join(self.path, 'new_Introns_dm2.txt')
tmpexonAnnotFileName = self.copyFile(exonAnnotFileName)
tmpintronAnnotFileName = self.copyFile(intronAnnotFileName)
if smallSampleKey:
chrList = [ smallSampleKey ]
else:
chrList = dm2.seqLenDict.keys()
chrList.sort()
outfile = open(newexonAnnotFileName, 'w')
for chrid in chrList:
slice = dm2[chrid]
try:
ex1 = exonmsa[slice]
except KeyError:
continue
else:
exlist1 = [(ix.exon_id, ix) for ix in ex1.keys()]
exlist1.sort()
for ixx, exon in exlist1:
saveList = []
tmp = exon.sequence
tmpexon = exons[exon.exon_id]
tmpslice = tmpexon.sequence # FOR REAL EXON COORDINATE
wlist1 = 'EXON', chrid, tmpexon.exon_id, tmpexon.gene_id, tmpslice.start, tmpslice.stop
try:
out1 = conservedmsa[tmp]
except KeyError:
pass
else:
elementlist = [(ix.ucsc_id, ix) for ix in out1.keys()]
elementlist.sort()
for iyy, element in elementlist:
if element.stop - element.start < 100: continue
score = int(string.split(element.gene_id, '=')[1])
if score < 100: continue
tmp2 = element.sequence
tmpelement = mostconserved[element.ucsc_id]
tmpslice2 = tmpelement.sequence # FOR REAL ELEMENT COORDINATE
wlist2 = wlist1 + (tmpelement.ucsc_id, tmpelement.gene_id, tmpslice2.start, tmpslice2.stop)
slicestart, sliceend = max(tmp.start, tmp2.start), min(tmp.stop, tmp2.stop)
tmp1 = msa.seqDict['dm2.' + chrid][slicestart:sliceend]
edges = msa[tmp1].edges()
for src, dest, e in edges:
if src.stop - src.start < 100: continue
palign, pident = e.pAligned(), e.pIdentity()
if palign < 0.8 or pident < 0.8: continue
palign, pident = '%.2f' % palign, '%.2f' % pident
wlist3 = wlist2 + ((~msa.seqDict)[src], str(src), src.start, src.stop, \
(~msa.seqDict)[dest], \
str(dest), dest.start, dest.stop, palign, pident)
saveList.append('\t'.join(map(str, wlist3)) + '\n')
saveList.sort()
for saveline in saveList:
outfile.write(saveline)
outfile.close()
md5old = hashlib.md5()
md5old.update(open(tmpexonAnnotFileName, 'r').read())
md5new = hashlib.md5()
md5new.update(open(newexonAnnotFileName, 'r').read())
assert md5old.digest() == md5new.digest() # MD5 COMPARISON INSTEAD OF COMPARING EACH CONTENTS
outfile = open(newintronAnnotFileName, 'w')
for chrid in chrList:
slice = dm2[chrid]
try:
sp1 = splicemsa[slice]
except:
continue
else:
splist1 = [(ix.splice_id, ix) for ix in sp1.keys()]
splist1.sort()
for ixx, splice in splist1:
saveList = []
tmp = splice.sequence
tmpsplice = splices[splice.splice_id]
tmpslice = tmpsplice.sequence # FOR REAL EXON COORDINATE
wlist1 = 'INTRON', chrid, tmpsplice.splice_id, tmpsplice.gene_id, tmpslice.start, tmpslice.stop
try:
out1 = conservedmsa[tmp]
except KeyError:
pass
else:
elementlist = [(ix.ucsc_id, ix) for ix in out1.keys()]
elementlist.sort()
for iyy, element in elementlist:
if element.stop - element.start < 100: continue
score = int(string.split(element.gene_id, '=')[1])
if score < 100: continue
tmp2 = element.sequence
tmpelement = mostconserved[element.ucsc_id]
tmpslice2 = tmpelement.sequence # FOR REAL ELEMENT COORDINATE
wlist2 = wlist1 + (tmpelement.ucsc_id, tmpelement.gene_id, tmpslice2.start, tmpslice2.stop)
slicestart, sliceend = max(tmp.start, tmp2.start), min(tmp.stop, tmp2.stop)
tmp1 = msa.seqDict['dm2.' + chrid][slicestart:sliceend]
edges = msa[tmp1].edges()
for src, dest, e in edges:
if src.stop - src.start < 100: continue
palign, pident = e.pAligned(), e.pIdentity()
if palign < 0.8 or pident < 0.8: continue
palign, pident = '%.2f' % palign, '%.2f' % pident
wlist3 = wlist2 + ((~msa.seqDict)[src], str(src), src.start, src.stop, \
(~msa.seqDict)[dest], \
str(dest), dest.start, dest.stop, palign, pident)
saveList.append('\t'.join(map(str, wlist3)) + '\n')
saveList.sort()
for saveline in saveList:
outfile.write(saveline)
outfile.close()
md5old = hashlib.md5()
md5old.update(open(tmpintronAnnotFileName, 'r').read())
md5new = hashlib.md5()
md5new.update(open(newintronAnnotFileName, 'r').read())
assert md5old.digest() == md5new.digest() # MD5 COMPARISON INSTEAD OF COMPARING EACH CONTENTS
def test_mysqlannot(self):
'Test building an AnnotationDB from MySQL'
from pygr import seqdb, cnestedlist, sqlgraph
dm2 = pygr.Data.getResource('TEST.Seq.Genome.dm2')
# BUILD ANNOTATION DATABASE FOR REFSEQ EXONS: MYSQL VERSION
exon_slices = sqlgraph.SQLTableClustered('%s.pygr_refGene_exonAnnot%s_dm2' % ( testInputDB, smallSamplePostfix ),
clusterKey = 'chromosome', maxCache = 0)
exon_db = seqdb.AnnotationDB(exon_slices, dm2, sliceAttrDict = dict(id = 'chromosome', \
gene_id = 'name', exon_id = 'exon_id'))
msa = cnestedlist.NLMSA(os.path.join(self.path, 'refGene_exonAnnot_SQL_dm2'), 'w', \
pairwiseMode = True, bidirectional = False)
for id in exon_db:
msa.addAnnotation(exon_db[id])
exon_db.clear_cache() # not really necessary; cache should autoGC
exon_slices.clear_cache()
msa.build()
exon_db.__doc__ = 'SQL Exon Annotation Database for dm2'
pygr.Data.addResource('TEST.Annotation.SQL.dm2.exons', exon_db)
msa.__doc__ = 'SQL NLMSA Exon for dm2'
pygr.Data.addResource('TEST.Annotation.NLMSA.SQL.dm2.exons', msa)
exon_schema = pygr.Data.ManyToManyRelation(dm2, exon_db, bindAttrs = ('exon2',))
exon_schema.__doc__ = 'SQL Exon Schema for dm2'
pygr.Data.addSchema('TEST.Annotation.NLMSA.SQL.dm2.exons', exon_schema)
# BUILD ANNOTATION DATABASE FOR REFSEQ SPLICES: MYSQL VERSION
splice_slices = sqlgraph.SQLTableClustered('%s.pygr_refGene_spliceAnnot%s_dm2' % ( testInputDB, smallSamplePostfix ),
clusterKey = 'chromosome', maxCache = 0)
splice_db = seqdb.AnnotationDB(splice_slices, dm2, sliceAttrDict = dict(id = 'chromosome', \
gene_id = 'name', splice_id = 'splice_id'))
msa = cnestedlist.NLMSA(os.path.join(self.path, 'refGene_spliceAnnot_SQL_dm2'), 'w', \
pairwiseMode = True, bidirectional = False)
for id in splice_db:
msa.addAnnotation(splice_db[id])
splice_db.clear_cache() # not really necessary; cache should autoGC
splice_slices.clear_cache()
msa.build()
splice_db.__doc__ = 'SQL Splice Annotation Database for dm2'
pygr.Data.addResource('TEST.Annotation.SQL.dm2.splices', splice_db)
msa.__doc__ = 'SQL NLMSA Splice for dm2'
pygr.Data.addResource('TEST.Annotation.NLMSA.SQL.dm2.splices', msa)
splice_schema = pygr.Data.ManyToManyRelation(dm2, splice_db, bindAttrs = ('splice2',))
splice_schema.__doc__ = 'SQL Splice Schema for dm2'
pygr.Data.addSchema('TEST.Annotation.NLMSA.SQL.dm2.splices', splice_schema)
# BUILD ANNOTATION DATABASE FOR MOST CONSERVED ELEMENTS FROM UCSC: MYSQL VERSION
ucsc_slices = sqlgraph.SQLTableClustered('%s.pygr_phastConsElements15way%s_dm2' % ( testInputDB, smallSamplePostfix ),
clusterKey = 'chromosome', maxCache = 0)
ucsc_db = seqdb.AnnotationDB(ucsc_slices, dm2, sliceAttrDict = dict(id = 'chromosome', \
gene_id = 'name', ucsc_id = 'ucsc_id'))
msa = cnestedlist.NLMSA(os.path.join(self.path, 'phastConsElements15way_SQL_dm2'), 'w', \
pairwiseMode = True, bidirectional = False)
for id in ucsc_db:
msa.addAnnotation(ucsc_db[id])
ucsc_db.clear_cache() # not really necessary; cache should autoGC
ucsc_slices.clear_cache()
msa.build()
ucsc_db.__doc__ = 'SQL Most Conserved Elements for dm2'
pygr.Data.addResource('TEST.Annotation.UCSC.SQL.dm2.mostconserved', ucsc_db)
msa.__doc__ = 'SQL NLMSA for Most Conserved Elements for dm2'
pygr.Data.addResource('TEST.Annotation.UCSC.NLMSA.SQL.dm2.mostconserved', msa)
ucsc_schema = pygr.Data.ManyToManyRelation(dm2, ucsc_db, bindAttrs = ('element2',))
ucsc_schema.__doc__ = 'SQL Schema for UCSC Most Conserved Elements for dm2'
pygr.Data.addSchema('TEST.Annotation.UCSC.NLMSA.SQL.dm2.mostconserved', ucsc_schema)
pygr.Data.save()
pygr.Data.clear_cache()
# QUERY TO EXON AND SPLICES ANNOTATION DATABASE
dm2 = pygr.Data.getResource('TEST.Seq.Genome.dm2')
exonmsa = pygr.Data.getResource('TEST.Annotation.NLMSA.SQL.dm2.exons')
splicemsa = pygr.Data.getResource('TEST.Annotation.NLMSA.SQL.dm2.splices')
conservedmsa = pygr.Data.getResource('TEST.Annotation.UCSC.NLMSA.SQL.dm2.mostconserved')
exons = pygr.Data.getResource('TEST.Annotation.SQL.dm2.exons')
splices = pygr.Data.getResource('TEST.Annotation.SQL.dm2.splices')
mostconserved = pygr.Data.getResource('TEST.Annotation.UCSC.SQL.dm2.mostconserved')
# OPEN DM2_MULTIZ15WAY NLMSA
msa = cnestedlist.NLMSA(os.path.join(msaDir, 'dm2_multiz15way'), 'r', trypath = [seqDir])
exonAnnotFileName = os.path.join(testInputDir, 'Annotation_ConservedElement_Exons%s_dm2.txt' % smallSamplePostfix)
intronAnnotFileName = os.path.join(testInputDir, 'Annotation_ConservedElement_Introns%s_dm2.txt' % smallSamplePostfix)
newexonAnnotFileName = os.path.join(self.path, 'new_Exons_dm2.txt')
newintronAnnotFileName = os.path.join(self.path, 'new_Introns_dm2.txt')
tmpexonAnnotFileName = self.copyFile(exonAnnotFileName)
tmpintronAnnotFileName = self.copyFile(intronAnnotFileName)
if smallSampleKey:
chrList = [ smallSampleKey ]
else:
chrList = dm2.seqLenDict.keys()
chrList.sort()
outfile = open(newexonAnnotFileName, 'w')
for chrid in chrList:
slice = dm2[chrid]
try:
ex1 = exonmsa[slice]
except KeyError:
continue
else:
exlist1 = [(ix.exon_id, ix) for ix in ex1.keys()]
exlist1.sort()
for ixx, exon in exlist1:
saveList = []
tmp = exon.sequence
tmpexon = exons[exon.exon_id]
tmpslice = tmpexon.sequence # FOR REAL EXON COORDINATE
wlist1 = 'EXON', chrid, tmpexon.exon_id, tmpexon.gene_id, tmpslice.start, tmpslice.stop
try:
out1 = conservedmsa[tmp]
except KeyError:
pass
else:
elementlist = [(ix.ucsc_id, ix) for ix in out1.keys()]
elementlist.sort()
for iyy, element in elementlist:
if element.stop - element.start < 100: continue
score = int(string.split(element.gene_id, '=')[1])
if score < 100: continue
tmp2 = element.sequence
tmpelement = mostconserved[element.ucsc_id]
tmpslice2 = tmpelement.sequence # FOR REAL ELEMENT COORDINATE
wlist2 = wlist1 + (tmpelement.ucsc_id, tmpelement.gene_id, tmpslice2.start, tmpslice2.stop)
slicestart, sliceend = max(tmp.start, tmp2.start), min(tmp.stop, tmp2.stop)
tmp1 = msa.seqDict['dm2.' + chrid][slicestart:sliceend]
edges = msa[tmp1].edges()
for src, dest, e in edges:
if src.stop - src.start < 100: continue
palign, pident = e.pAligned(), e.pIdentity()
if palign < 0.8 or pident < 0.8: continue
palign, pident = '%.2f' % palign, '%.2f' % pident
wlist3 = wlist2 + ((~msa.seqDict)[src], str(src), src.start, src.stop, \
(~msa.seqDict)[dest], \
str(dest), dest.start, dest.stop, palign, pident)
saveList.append('\t'.join(map(str, wlist3)) + '\n')
saveList.sort()
for saveline in saveList:
outfile.write(saveline)
outfile.close()
md5old = hashlib.md5()
md5old.update(open(tmpexonAnnotFileName, 'r').read())
md5new = hashlib.md5()
md5new.update(open(newexonAnnotFileName, 'r').read())
        assert md5old.digest() == md5new.digest() # MD5 COMPARISON INSTEAD OF COMPARING FULL FILE CONTENTS
outfile = open(newintronAnnotFileName, 'w')
for chrid in chrList:
slice = dm2[chrid]
try:
sp1 = splicemsa[slice]
            except KeyError:
continue
else:
splist1 = [(ix.splice_id, ix) for ix in sp1.keys()]
splist1.sort()
for ixx, splice in splist1:
saveList = []
tmp = splice.sequence
tmpsplice = splices[splice.splice_id]
                    tmpslice = tmpsplice.sequence # FOR REAL SPLICE COORDINATE
wlist1 = 'INTRON', chrid, tmpsplice.splice_id, tmpsplice.gene_id, tmpslice.start, tmpslice.stop
try:
out1 = conservedmsa[tmp]
except KeyError:
pass
else:
elementlist = [(ix.ucsc_id, ix) for ix in out1.keys()]
elementlist.sort()
for iyy, element in elementlist:
if element.stop - element.start < 100: continue
score = int(string.split(element.gene_id, '=')[1])
if score < 100: continue
tmp2 = element.sequence
tmpelement = mostconserved[element.ucsc_id]
tmpslice2 = tmpelement.sequence # FOR REAL ELEMENT COORDINATE
wlist2 = wlist1 + (tmpelement.ucsc_id, tmpelement.gene_id, tmpslice2.start, tmpslice2.stop)
slicestart, sliceend = max(tmp.start, tmp2.start), min(tmp.stop, tmp2.stop)
tmp1 = msa.seqDict['dm2.' + chrid][slicestart:sliceend]
edges = msa[tmp1].edges()
for src, dest, e in edges:
if src.stop - src.start < 100: continue
palign, pident = e.pAligned(), e.pIdentity()
if palign < 0.8 or pident < 0.8: continue
palign, pident = '%.2f' % palign, '%.2f' % pident
wlist3 = wlist2 + ((~msa.seqDict)[src], str(src), src.start, src.stop, \
(~msa.seqDict)[dest], \
str(dest), dest.start, dest.stop, palign, pident)
saveList.append('\t'.join(map(str, wlist3)) + '\n')
saveList.sort()
for saveline in saveList:
outfile.write(saveline)
outfile.close()
md5old = hashlib.md5()
md5old.update(open(tmpintronAnnotFileName, 'r').read())
md5new = hashlib.md5()
md5new.update(open(newintronAnnotFileName, 'r').read())
        assert md5old.digest() == md5new.digest() # MD5 COMPARISON INSTEAD OF COMPARING FULL FILE CONTENTS
if __name__ == '__main__':
PygrTestProgram(verbosity=2)
| ctb/pygr | tests/annotation_dm2_megatest.py | Python | bsd-3-clause | 29,345 |
"""
The I_min measure as proposed by Williams & Beer.
"""
import numpy as np
from ..pid import BasePID
__all__ = (
'PID_WB',
)
def s_i(d, source, target, target_value):
"""
Compute the specific mutual information I(source : target=target_value)
Parameters
----------
d : Distribution
The distribution from which this quantity is to be calculated.
source : iterable
The source aggregate variable.
target : iterable
The target aggregate variable.
target_value : iterable
The value of the target.
Returns
-------
s : float
The specific information
"""
pp_s, pp_a_s = d.condition_on(target, rvs=source)
p_s = pp_s[target_value]
p_a_s = pp_a_s[pp_s.outcomes.index(target_value)]
pp_a, pp_s_a = d.condition_on(source, rvs=target)
p_s_a = {a: pp[target_value] for a, pp in zip(pp_a.outcomes, pp_s_a)}
return np.nansum([p_a_s[a] * np.log2(psa / p_s) for a, psa in p_s_a.items()])
class PID_WB(BasePID):
"""
The Williams & Beer partial information decomposition.
"""
_name = "I_min"
@staticmethod
def _measure(d, sources, target):
"""
        Compute I_min(sources : target) =
            \\sum_{s \\in target} p(s) \\min_{source \\in sources} I(source : target=s)
Parameters
----------
d : Distribution
The distribution to compute i_min for.
sources : iterable of iterables
The source variables.
target : iterable
The target variable.
Returns
-------
imin : float
The value of I_min.
"""
p_s = d.marginal(target)
return sum(p_s[s] * min(s_i(d, source, target, s) for source in sources) for s in p_s.outcomes)
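# Illustrative usage (a sketch assuming the public `dit` API; the XOR
# distribution below is only an example):
#
# import dit
# d = dit.Distribution(['000', '011', '101', '110'], [0.25] * 4)
# print(dit.pid.PID_WB(d, [[0], [1]], [2]))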
| dit/dit | dit/pid/measures/imin.py | Python | bsd-3-clause | 1,798 |
from django.contrib.sitemaps import Sitemap
from models import Post, Category, Series
class PostSitemap(Sitemap):
"""
Post sitemap
"""
def items(self):
return Post.published.all()
def lastmod(self, obj):
return obj.last_modified
class CategorySitemap(Sitemap):
"""
Category sitemap
"""
def items(self):
return Category.objects.all()
class SeriesSitemap(Sitemap):
"""
Series sitemap
"""
def items(self):
return Series.objects.all()
def lastmod(self, obj):
return obj.created_on
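# Illustrative wiring (a sketch assuming Django's stock sitemap view; the URL
# pattern and dictionary keys below are hypothetical):
#
# from django.contrib.sitemaps.views import sitemap
#
# sitemaps = {
#     'posts': PostSitemap,
#     'categories': CategorySitemap,
#     'series': SeriesSitemap,
# }
# urlpatterns = [
#     url(r'^sitemap\.xml$', sitemap, {'sitemaps': sitemaps}),
# ]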
| davisd/django-blogyall | blog/sitemaps.py | Python | bsd-3-clause | 586 |
# -*- coding: utf8 -*-
from datetime import date, datetime, time
import isodate
from django.forms.widgets import Input
__all__ = 'ISO8601DateInput', 'ISO8601DatetimeInput', 'ISO8601TimeInput'
class ISO8601DateInput(Input):
input_type = 'text'
def __init__(self, attrs=None, format="%Y-%m-%d", yeardigits=4):
super(ISO8601DateInput, self).__init__(attrs)
self.format = format
self.yeardigits = yeardigits
def format_value(self, value):
if isinstance(value, date):
return isodate.date_isoformat(value, self.format, self.yeardigits)
return value
class ISO8601DatetimeInput(Input):
input_type = 'text'
def __init__(self, attrs=None, format="%Y-%m-%dT%H:%M:%S%Z"):
super(ISO8601DatetimeInput, self).__init__(attrs)
self.format = format
def format_value(self, value):
if isinstance(value, datetime):
return isodate.datetime_isoformat(value, self.format)
return value
class ISO8601TimeInput(Input):
input_type = 'text'
def __init__(self, attrs=None, format="%H:%M:%S%Z"):
super(ISO8601TimeInput, self).__init__(attrs)
self.format = format
def format_value(self, value):
if isinstance(value, time):
try:
return isodate.time_isoformat(value, self.format)
            except Exception:  # fall back to a repr if formatting fails
return repr(value)
return value
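# Illustrative usage (a sketch; `EventForm` and its fields are hypothetical):
#
# from django import forms
#
# class EventForm(forms.Form):
#     day = forms.DateField(widget=ISO8601DateInput())
#     starts = forms.DateTimeField(widget=ISO8601DatetimeInput())
#     doors_open = forms.TimeField(widget=ISO8601TimeInput())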
| k0001/django-iso8601 | django_iso8601/widgets.py | Python | bsd-3-clause | 1,423 |
# -*- coding: utf-8 -*-
"""
© Copyright 2014-2015, by Serge Domkowski.
.. note::
This code includes a few modifications to rules in the FIQL draft.
The rule defined for ``Comparison`` has been modifed to deal with an
inconsistency in the draft documentation. The change fixes an issue where
the string "==" was NOT a valid ``Comparison`` and thus made most of
the examples in the FIQL draft incorrect.
    The accepted arg chars have been modified to include ":". This change
fixes the issue where :rfc:`3339` compliant DateTime values were not valid
unless the ":" was percent-encoded. This contradicted the FIQL draft
``date_str`` examples. Since ":" is a valid character in an HTTP query
``*( pchar / "/" / "?" )``, I opted to fix the issue by simply allowing
the ":" in addition to the other arg chars.
"""
from __future__ import unicode_literals
from __future__ import absolute_import
__version__ = "0.15"
from .exceptions import FiqlException
from .exceptions import FiqlObjectException, FiqlFormatException
from .operator import Operator
from .constraint import Constraint
from .expression import Expression
from .parser import parse_str_to_expression
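# Illustrative usage (a sketch; the FIQL string below is only an example):
#
# expression = parse_str_to_expression("last_name==foo*;age=lt=55")
# str(expression)          # -> 'last_name==foo*;age=lt=55'
# expression.to_python()   # -> nested Python representation of the query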
| sergedomk/fiql_parser | fiql_parser/__init__.py | Python | bsd-3-clause | 1,209 |
###############################################################################
# KingPotential.py: Potential of a King profile
###############################################################################
import numpy
from ..util import conversion
from .Force import Force
from .interpSphericalPotential import interpSphericalPotential
class KingPotential(interpSphericalPotential):
"""KingPotential.py: Potential of a King profile, defined from the distribution function
.. math::
        f(\\mathcal{E}) = \\begin{cases} \\rho_1\\,(2\\pi\\sigma^2)^{-3/2}\\,\\left(e^{\\mathcal{E}/\\sigma^2}-1\\right), & \\mathcal{E} > 0\\\\
        0, & \\mathcal{E} \\leq 0 \\end{cases}
    where :math:`\\mathcal{E}` is the binding energy.
"""
def __init__(self,W0=2.,M=3.,rt=1.5,npt=1001,_sfkdf=None,ro=None,vo=None):
"""
NAME:
__init__
PURPOSE:
Initialize a King potential
INPUT:
W0= (2.) dimensionless central potential W0 = Psi(0)/sigma^2 (in practice, needs to be <~ 200, where the DF is essentially isothermal)
           M= (3.) total mass (can be a Quantity)
           rt= (1.5) tidal radius (can be a Quantity)
npt= (1001) number of points to use to solve for Psi(r) when solving the King DF
ro=, vo= standard galpy unit scaling parameters
OUTPUT:
(none; sets up instance)
HISTORY:
2020-07-11 - Written - Bovy (UofT)
"""
# Initialize with Force just to parse (ro,vo)
Force.__init__(self,ro=ro,vo=vo)
newM= conversion.parse_mass(M,ro=self._ro,vo=self._vo)
if newM != M:
self.turn_physical_on(ro=self._ro,vo=self._vo)
M= newM
rt= conversion.parse_length(rt,ro=self._ro)
# Set up King DF
if _sfkdf is None:
from ..df.kingdf import _scalefreekingdf
sfkdf= _scalefreekingdf(W0)
sfkdf.solve(npt)
else:
sfkdf= _sfkdf
mass_scale= M/sfkdf.mass
radius_scale= rt/sfkdf.rt
# Remember whether to turn units on
ro= self._ro if self._roSet else ro
vo= self._vo if self._voSet else vo
interpSphericalPotential.__init__(\
self,
rforce=lambda r: mass_scale/radius_scale**2.
*numpy.interp(r/radius_scale,
sfkdf._r,
sfkdf._dWdr),
rgrid=sfkdf._r*radius_scale,
Phi0=-W0*mass_scale/radius_scale,
ro=ro,vo=vo)
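# Illustrative usage (a sketch in galpy's natural units; the parameter values
# below are arbitrary):
#
# from galpy.potential import KingPotential
# kp = KingPotential(W0=3., M=2.3, rt=1.4)
# kp(1.2, 0.)  # potential at cylindrical (R, z) = (1.2, 0.)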
| jobovy/galpy | galpy/potential/KingPotential.py | Python | bsd-3-clause | 2,587 |
from xml.dom.minidom import parseString
from xml.etree.ElementTree import tostring, SubElement, Element
from datetime import datetime
from dateutil.parser import parse
from api import XeroPrivateClient, XeroException
from api import XERO_BASE_URL, XERO_API_URL
import urllib
class XeroException404(XeroException):
pass
class XeroException500(XeroException):
pass
class XeroBadRequest(XeroException):
pass
class XeroNotImplemented(XeroException):
pass
class XeroExceptionUnknown(XeroException):
pass
class Manager(object):
DECORATED_METHODS = ('get', 'save', 'filter', 'all', 'put')
DATETIME_FIELDS = (u'UpdatedDateUTC', u'Updated', u'FullyPaidOnDate')
DATE_FIELDS = (u'DueDate', u'Date')
BOOLEAN_FIELDS = (u'IsSupplier', u'IsCustomer', u'HasAttachments')
MULTI_LINES = (u'LineItem', u'Phone', u'Address', 'TaxRate')
PLURAL_EXCEPTIONS = {'Addresse':'Address'}
def __init__(self, name, client):
self.client = client
self.name = name
# setup our singular variants of the name
# only if the name ends in 0
if name[-1] == "s":
self.singular = name[:len(name)-1]
else:
self.singular = name
for method_name in self.DECORATED_METHODS:
method = getattr(self, method_name)
setattr(self, method_name, self.__get_data(method))
def walk_dom(self, dom):
tree_list = tuple()
for node in dom.childNodes:
tagName = getattr(node, 'tagName', None)
if tagName:
tree_list += (tagName , self.walk_dom(node),)
else:
data = node.data.strip()
if data:
tree_list += (node.data.strip(),)
return tree_list
def convert_to_dict(self, deep_list):
out = {}
if len(deep_list) > 2:
lists = [l for l in deep_list if isinstance(l, tuple)]
keys = [l for l in deep_list if isinstance(l, unicode)]
for key, data in zip(keys, lists):
if len(data) == 1:
# we're setting a value
# check to see if we need to apply any special
# formatting to the value
val = data[0]
if key in self.BOOLEAN_FIELDS:
val = True if val.lower() == 'true' else False
if key in self.DATETIME_FIELDS:
#Jayd hack to convert datetime object to string
#Allows saving in MongoDB
val = parse(val).strftime("%Y-%m-%d %H:%M:%S")
if key in self.DATE_FIELDS:
#Jayd hack to convert datetime object to string
#Allows saving in MongoDB
val = parse(val).strftime("%Y-%m-%d %H:%M:%S")
out[key] = val
elif len(data) > 1 and ((key in self.MULTI_LINES) or (key == self.singular)):
# our data is a collection and needs to be handled as such
if out:
out += (self.convert_to_dict(data),)
else:
out = (self.convert_to_dict(data),)
elif len(data) > 1:
out[key] = self.convert_to_dict(data)
elif len(deep_list) == 2:
key = deep_list[0]
data = deep_list[1]
out[key] = self.convert_to_dict(data)
else:
out = deep_list[0]
return out
def dict_to_xml( self, root_elm, dict_data ):
for key in dict_data.keys():
_data = dict_data[key]
_elm = SubElement(root_elm, key)
_list_data = (isinstance(_data, list) or isinstance(_data, tuple))
_is_plural = (key[len(key)-1] == "s")
_plural_name = key[:len(key)-1]
if isinstance(_data, dict):
_elm = self.dict_to_xml(_elm, _data)
elif _list_data and not _is_plural:
for _d in _data:
__elm = self.dict_to_xml(_elm, _d)
elif _list_data:
for _d in _data:
_plural_name = self.PLURAL_EXCEPTIONS.get(_plural_name, _plural_name)
__elm = self.dict_to_xml(SubElement(_elm, _plural_name), _d)
else:
_elm.text = str(_data)
return root_elm
def __prepare_data__for_save(self, data):
if isinstance(data, list) or isinstance(data, tuple):
root_elm = Element(self.name)
for d in data:
sub_elm = SubElement(root_elm, self.singular)
self.dict_to_xml(sub_elm, d)
else:
root_elm = self.dict_to_xml(Element(self.singular), data)
return tostring(root_elm)
def __get_results(self, data):
response = data[u'Response']
result = response.get(self.name, {})
if isinstance(result, tuple):
return result
if isinstance(result, dict) and result.has_key(self.singular):
return result[self.singular]
def __get_data(self, func):
def wrapper(*args, **kwargs):
req_args = func(*args, **kwargs)
response = self.client.request(*req_args)
body = response[1]
headers = response[0]
if headers['status'] == '200':
if headers['content-type'] == 'application/pdf':
return body
dom = parseString(body)
data = self.convert_to_dict(self.walk_dom(dom))
return self.__get_results(data)
elif headers['status'] == '404':
msg = ' : '.join([str(headers['status']), body])
raise XeroException404(msg)
elif headers['status'] == '500':
msg = ' : '.join([str(headers['status']), body])
raise XeroException500(msg)
elif headers['status'] == '400' or headers['status'] == '401':
msg = ' : '.join([str(headers['status']), body])
raise XeroBadRequest(msg)
elif headers['status'] == '501':
msg = ' : '.join([str(headers['status']), body])
raise XeroNotImplemented(msg)
else:
msg = ' : '.join([str(headers['status']), body])
raise XeroExceptionUnknown(msg)
return wrapper
def get(self, id, headers=None):
uri = '/'.join([XERO_API_URL, self.name, id])
return uri, 'GET', None, headers
def save_or_put(self, data, method='post'):
headers = {
"Content-Type": "application/x-www-form-urlencoded; charset=utf-8"
}
uri = '/'.join([XERO_API_URL, self.name])
body = 'xml='+urllib.quote(self.__prepare_data__for_save(data))
return uri, method, body, headers
def save(self, data):
return self.save_or_put(data, method='post')
def put(self, data):
return self.save_or_put(data, method='PUT')
def prepare_filtering_date(self, val):
if isinstance(val, datetime):
val = val.strftime('%a, %d %b %Y %H:%M:%S GMT')
else:
val = '"%s"' % val
return {'If-Modified-Since': val}
def filter(self, **kwargs):
headers = None
uri = '/'.join([XERO_API_URL, self.name])
if kwargs:
if kwargs.has_key('Since'):
val = kwargs['Since']
headers = self.prepare_filtering_date(val)
del kwargs['Since']
            # `key` is passed explicitly rather than relying on the list
            # comprehension variable leaking into this scope (a Python 2 quirk).
            def get_filter_params(key):
if key in self.BOOLEAN_FIELDS:
return 'true' if kwargs[key] else 'false'
elif key in self.DATETIME_FIELDS:
return kwargs[key].isoformat()
else:
return '"%s"' % str(kwargs[key])
def generate_param(key):
parts = key.split("__")
field = key.replace('_','.')
fmt = '%s==%s'
if len(parts) == 2:
# support filters:
# Name__Contains=John becomes Name.Contains("John")
if parts[1] in ["Contains", "StartsWith", "EndsWith"]:
field = parts[0]
fmt = ''.join(['%s.', parts[1], '(%s)'])
return fmt % (
field,
                    get_filter_params(key)
)
params = [generate_param(key) for key in kwargs.keys()]
if params:
uri += '?where=' + urllib.quote('&&'.join(params))
return uri, 'GET', None, headers
def all(self):
uri = '/'.join([XERO_API_URL, self.name])
return uri, 'GET', None, None
class Xero(object):
"""
An ORM interface to the Xero API
This has only been tested with the Private API
"""
OBJECT_LIST = (u'Contacts', u'Accounts', u'CreditNotes',
u'Currencies', u'Invoices', u'Organisation', u'Overpayments',
u'Payments', u'TaxRates', u'TrackingCategories')
def __init__(self, consumer_key, consumer_secret, privatekey):
# instantiate our private api client
client = XeroPrivateClient(consumer_key,
consumer_secret,
privatekey)
# iterate through the list of objects we support, for
# each of them create an attribute on our self that is
# the lowercase name of the object and attach it to an
# instance of a Manager object to operate on it
for name in self.OBJECT_LIST:
setattr(self, name.lower(), Manager(name, client))
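# Illustrative usage (a sketch; the credentials, file name, and filter value
# below are placeholders, not working values):
#
# xero = Xero(CONSUMER_KEY, CONSUMER_SECRET, open('privatekey.pem').read())
# contacts = xero.contacts.all()
# # Double underscores in filter kwargs map to dotted fields, so
# # Name__Contains='Peter' becomes Name.Contains("Peter") in the query.
# matches = xero.contacts.filter(Name__Contains='Peter')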
| jaydlawrence/XeroPy | XeroPy/__init__.py | Python | bsd-3-clause | 9,848 |
"""
============================
Estimating Integration Error
============================
Objectives
----------
* Explain the purpose of estimating integration error
* Demonstrate integration error calculation procedure with PySPLIT
* Demonstrate another ``TrajectoryGroup``, ``Trajectory`` workflow tool
Intro
-----
Total trajectory error consists of physical and numerical error
(see http://www.arl.noaa.gov/faq_hg11.php). One part of the numerical error is
integration error, which can be estimated by generating an original/reverse
trajectory pair, in which the reverse trajectory is initialized at the end of
the original trajectory and run the opposite direction.
By calculating the total travel distance of the two trajectories and the
distance between the original trajectory start and reverse trajectory end
points, we can estimate absolute and relative integration error.
First, reverse trajectories must be available. For information on how to
generate reverse trajectories with ``PySPLIT``, see ``bulk_trajgen_example.py``
and ``reversetraj_clippedtraj_gen.py``.
Setup
-----
Load the original and reverse trajectories. This example uses the
trajectories generated in ``bulk_trajgen_example.py``.
"""
from __future__ import print_function
import numpy as np
import pysplit
trajgroup = pysplit.make_trajectorygroup(r'C:/trajectories/colgate/*')
for traj in trajgroup:
traj.load_reversetraj()
"""
Calculating integration error
-----------------------------
Values computed when calling ``Trajectory.calculate_integrationerr()``:
``Trajectory.integration_error``, the relative error (%)
``Trajectory.integration_error_abs``, the absolute error (meters)
"""
for traj in trajgroup:
traj.calculate_integrationerr()
"""
Usage example
-------------
Once we have these values, one action we can take is to discard the "bad"
trajectories. A reasonable way to define "bad" trajectories is as those with
integration errors greater than two standard deviations above the mean:
"""
relative_errors = [traj.integration_error for traj in trajgroup]
cutoff = np.mean(relative_errors) + (np.std(relative_errors) * 2)
print('Integration error upper limit: ', cutoff)
"""
With this data, we can cycle through ``trajgroup`` and either identify "good"
trajectories to put in a new ``TrajectoryGroup``
(see ``traj_trajgroup_basics_example.py``), or, as below, identify "bad"
trajectories to remove from ``trajgroup``. In this example, we make a list of
the identifiers (``Trajectory.trajid``) of "bad" trajectories, then pass the
list to the ``TrajectoryGroup.pop()`` method, which removes the indicated
trajectories from ``trajgroup``.
``TrajectoryGroup.pop()`` accepts a list of ``Trajectory.trajid``, a single
``Trajectory.trajid``, or an index. If none of the above are specified it
defaults to the last ``Trajectory`` in ``TrajectoryGroup``. As with
``list.pop()``, performing ``TrajectoryGroup.pop()`` while iterating over
``TrajectoryGroup`` will lead to unexpected behavior.
``TrajectoryGroup.pop()`` returns a ``Trajectory``, if one
``Trajectory.trajid`` or an index is given, or a ``TrajectoryGroup``,
if given a list of ``Trajectory.trajid``.
"""
bad = []
for traj in trajgroup:
if traj.integration_error > cutoff:
bad.append(traj.trajid)
print('Expectation: ', trajgroup.trajcount, 'trajectories -', len(bad),
'bad trajectories =', trajgroup.trajcount-len(bad), 'trajectories')
trajgroup.pop(trajid=bad)
print('Result: ', trajgroup.trajcount, 'trajectories')
# for traj in trajgroup:
# print(traj.integration_error)
| mscross/pysplit | docs/examples/integration_error.py | Python | bsd-3-clause | 3,580 |
# -*- coding: utf-8 -*-
from django.test import SimpleTestCase
from corehq.apps.app_manager.exceptions import CaseXPathValidationError
from corehq.apps.app_manager.xpath import (
dot_interpolate,
UserCaseXPath,
interpolate_xpath,
)
class RegexTest(SimpleTestCase):
def test_regex(self):
replacement = "@case_id stuff"
cases = [
('./lmp < 570.5', '%s/lmp < 570.5'),
('stuff ./lmp < 570.', 'stuff %s/lmp < 570.'),
('.53 < hello.', '.53 < hello%s'),
]
for case in cases:
self.assertEqual(
dot_interpolate(case[0], replacement),
case[1] % replacement
)
def test_interpolate_xpath(self):
replacements = {
'case': "<casedb stuff>",
'user': UserCaseXPath().case(),
'session': "instance('commcaresession')/session",
}
cases = [
('./lmp < 570.5', '{case}/lmp < 570.5'),
('#case/lmp < 570.5', '{case}/lmp < 570.5'),
('stuff ./lmp < 570.', 'stuff {case}/lmp < 570.'),
('stuff #case/lmp < 570.', 'stuff {case}/lmp < 570.'),
('.53 < hello.', '.53 < hello{case}'),
('.53 < hello#case', '.53 < hello{case}'),
('#session/data/username', '{session}/data/username'),
('"jack" = #session/username', '"jack" = {session}/username'),
('./@case_id = #session/userid', '{case}/@case_id = {session}/userid'),
('#case/@case_id = #user/@case_id', '{case}/@case_id = {user}/@case_id'),
('#host/foo = 42', "instance('casedb')/casedb/case[@case_id={case}/index/host]/foo = 42"),
("'ham' = #parent/spam", "'ham' = instance('casedb')/casedb/case[@case_id={case}/index/parent]/spam"),
]
for case in cases:
self.assertEqual(
interpolate_xpath(case[0], replacements['case']),
case[1].format(**replacements)
)
def test_interpolate_xpath_error(self):
for case in ('./lmp < 570.5', '#case/lmp < 570.5'):
with self.assertRaises(CaseXPathValidationError):
                interpolate_xpath(case, None)
| qedsoftware/commcare-hq | corehq/apps/app_manager/tests/test_suite_regex.py | Python | bsd-3-clause | 2,212 |
"""
Look-ahead routines to find end character.
+------------------------------------------------------+------------------------+
| Function | Description |
+======================================================+========================+
| :py:func:`~matlab2cpp.tree.findend.expression` | Find end of expression |
| | (non-space delimited) |
+------------------------------------------------------+------------------------+
| :py:func:`~matlab2cpp.tree.findend.expression_space` | Find end of expression |
| | (space delimited) |
+------------------------------------------------------+------------------------+
| :py:func:`~matlab2cpp.tree.findend.matrix` | Find end of matrix |
| | construction |
+------------------------------------------------------+------------------------+
| :py:func:`~matlab2cpp.tree.findend.string` | Find end of string |
+------------------------------------------------------+------------------------+
| :py:func:`~matlab2cpp.tree.findend.comment` | Find end of comment |
+------------------------------------------------------+------------------------+
| :py:func:`~matlab2cpp.tree.findend.dots` | Find continuation |
| | after ellipse |
+------------------------------------------------------+------------------------+
| :py:func:`~matlab2cpp.tree.findend.paren` | Find matching |
| | parenthesis |
+------------------------------------------------------+------------------------+
| :py:func:`~matlab2cpp.tree.findend.cell` | Find matching |
| | cell-parenthesis |
+------------------------------------------------------+------------------------+
"""
from . import constants, identify
def expression(self, start):
"""
Find end of expression (non-space delimited)
Args:
self (Builder): Code constructor
start (int): current position in code
Returns:
int: index location of end of expression
"""
if self.code[start] not in constants.e_start:
self.syntaxerror(start, "expression start")
k = start
while True:
if self.code[k] == "(":
k = paren(self, k)
#k += 1
#break
elif self.code[k] == "[":
k = matrix(self, k)
elif self.code[k] == "'" and identify.string(self, k):
k = string(self, k)
elif self.code[k] == "{":
k = cell(self, k)
#elif self.code[k:k+3] == "...":
# k = dots(self, k)
elif self.code[k] == "=":
if self.code[k+1] == "=":
k += 1
else:
break
elif self.code[k] in "><~":
if self.code[k+1] == "=":
k += 1
elif self.code[k:k+3] == "...":
k = dots(self, k)
elif self.code[k] in constants.e_end:
break
k += 1
k -= 1
while self.code[k] in " \t":
k -= 1
return k
def expression_space(self, start):
"""
Find end of expression (space delimited)
Args:
self (Builder): Code constructor
start (int): current position in code
Returns:
int: index location of end of expression
"""
if self.code[start] not in constants.e_start:
self.syntaxerror(start, "expression start")
k = last = start
while True:
if self.code[k] == "(":
k = last = paren(self, k)
elif self.code[k] == "[":
k = last = matrix(self, k)
elif self.code[k] == "'":
if identify.string(self, k):
k = last = string(self, k)
else:
last = k
elif self.code[k] == "{":
k = last = cell(self, k)
elif self.code[k:k+3] == "...":
k = dots(self, k)
elif self.code[k] == ";":
return last
elif self.code[k] == "=":
if self.code[k+1] == "=":
k += 1
else:
return last
elif self.code[k] in "><~":
if self.code[k+1] == "=":
k += 1
elif self.code[k] in "+-":
while self.code[k+1] in " \t":
k += 1
elif self.code[k] in " \t":
if identify.space_delimiter(self, k):
return last
while self.code[k+1] in " \t+-~":
k += 1
elif self.code[k] in constants.e_end:
return last
elif self.code[k] in constants.letters + constants.digits + "_@":
while self.code[k+1] in constants.letters + constants.digits + "_@":
k += 1
last = k
k += 1
def matrix(self, start):
"""
Find end of matrix construction
Args:
self (Builder): Code constructor
start (int): current position in code
Returns:
int: index location of end of matrix
"""
if self.code[start] != "[":
self.syntaxerror(start, "matrix start ([)")
k = start+1
if identify.space_delimited(self, start):
# Ignore first string occurrence
while self.code[k] in " \t":
k += 1
if self.code[k] == "'":
k = string(self, k)+1
while True:
if self.code[k] == "[":
k = matrix(self, k)
elif self.code[k] == "]":
return k
elif self.code[k] == "%":
k = comment(self, k)
elif self.code[k] == "'" and identify.string(self, k): #and self.code[k-1] in constants.s_start:
k = string(self, k)
k += 1
else:
while True:
if self.code[k] == "[":
k = matrix(self, k)
elif self.code[k] == "]":
return k
elif self.code[k] == "%":
k = comment(self, k)
elif self.code[k] == "'" and identify.string(self, k):
k = string(self, k)
k += 1
def string(self, start):
"""
Find end of string
Args:
self (Builder): Code constructor
start (int): current position in code
Returns:
int: index location of end of string
"""
if self.code[start] != "'":
self.syntaxerror(start, "start of string (')")
k = self.code.find("'", start+1)
if k == -1:
self.syntaxerror(start, "matching end of string (')")
if self.code.find("\n", start, k) != -1:
self.syntaxerror(start, "non line-feed character in string")
return k
def pragma_for(self, start):
    """
    Find end of a pragma line
    Args:
        self (Builder): Code constructor
        start (int): current position in code
    Returns:
        int: index location of end of the pragma line
    """
    end = self.code.find("\n", start)
#while self.code[end+1] == "%"
# end = self.code.find("\n", start+1)
if end <= -1:
self.syntaxerror(start, "comment end")
return end
def tbb_for(self, start):
    """
    Find end of a tbb-for directive line
    Args:
        self (Builder): Code constructor
        start (int): current position in code
    Returns:
        int: index location of end of the directive line
    """
    end = self.code.find("\n", start)
if end <= -1:
self.syntaxerror(start, "command end")
return end
def comment(self, start):
"""
Find end of comment
Args:
self (Builder): Code constructor
start (int): current position in code
Returns:
int: index location of end of comment
"""
if self.code[start] != "%":
self.syntaxerror(start, "comment start")
# block comment
if self.code[start+1] == "{":
eoc = self.code.find("%}", start+2)
if eoc <= -1:
self.syntaxerror(start, "matching end of comment block (%})")
return eoc+1
# Line comment
eoc = self.code.find("\n", start)
if eoc <= -1:
self.syntaxerror(start, "comment end")
return eoc
#should find the end of verbatim area
def verbatim(self, start):
"""
Find end of verbatim
    Args:
        self (Builder): Code constructor
start (int): current position in code
Returns:
int: index location of end of verbatim
"""
if self.code[start:start+3] != "___":
self.syntaxerror(start, "verbatim start")
return self.code.find("\n", start)-1
def dots(self, start):
"""
Find continuation of expression after ellipse
Args:
self (Builder): Code constructor
start (int): current position in code
Returns:
int: index location of end of ellipse
"""
if self.code[start:start+3] != "...":
self.syntaxerror(start, "three dots (...)")
k = self.code.find("\n", start)
if k == -1:
self.syntaxerror(start, "next line feed character")
return k
def paren(self, start):
"""
Find matching parenthesis
Args:
self (Builder): Code constructor
start (int): current position in code
Returns:
int: index location of matching parenthesis
"""
if self.code[start] != "(":
self.syntaxerror(start, "start parenthesis")
k = start+1
while True:
if self.code[k] == "%":
self.syntaxerror(k, "no comments in parenthesis")
elif self.code[k:k+3] == "...":
k = dots(self, k)
elif self.code[k] == "'" and identify.string(self, k):
k = string(self, k)
elif self.code[k] == "[":
k = matrix(self, k)
elif self.code[k] == "(":
k = paren(self, k)
elif self.code[k] == ")":
return k
k += 1
def cell(self, start):
"""
Find matching cell-parenthesis
Args:
self (Builder): Code constructor
start (int): current position in code
Returns:
int: index location of matching cell-parenthesis
"""
if self.code[start] != "{":
self.syntaxerror(start, "start of cell ({)")
k = start
while True:
if self.code[k] == "%":
self.syntaxerror(k, "no comment in cell group")
elif self.code[k] == "'" and identify.string(self, k):
k = string(self, k)
elif self.code[k] == "(":
k = paren(self, k)
elif self.code[k] == "[":
k = matrix(self, k)
elif self.code[k] == "}":
l = k+1
while self.code[l] in " \t":
l += 1
if self.code[l] != "{":
return k
k = l
k += 1
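# Illustrative usage (a sketch; the helpers only need an object exposing
# `code` and `syntaxerror`, so a minimal stand-in suffices):
#
# class _DummyBuilder(object):
#     def __init__(self, code):
#         self.code = code
#     def syntaxerror(self, pos, msg):
#         raise SyntaxError('expected %s at %d' % (msg, pos))
#
# paren(_DummyBuilder('(a + (b*c)) + d'), 0)  # -> 10, index of matching ')'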
| jonathf/matlab2cpp | src/matlab2cpp/tree/findend.py | Python | bsd-3-clause | 10,363 |
import abc
import pathlib
import settings
import urllib.parse
def duplicate_path_fragments(url, dup_max=3):
path = urllib.parse.urlparse(url).path
parts = pathlib.Path(path).parts
segments = {}
for chunk in parts:
if not chunk in segments:
segments[chunk] = 0
segments[chunk] += 1
return any([tmp >= dup_max for tmp in segments.values()])
class RawScraperModuleBase(metaclass=abc.ABCMeta):
'''
The interface contract for a scraper module is very simple.
Basically, it just involves three parameters. The module name, as a class
attribute, and two static methods.
`cares_about_url()` takes a url parameter, and returns a boolean containing
whether the module thinks it wants that URL. This is used to screen new URLs
as to whether they should be scraped.
`get_start_urls()` should return a list of URLs to pre-populate the "should crawl"
page list.
------
Additional functionality can be added via two additional classmethods, that are
optional.
`check_prefetch()` is called before each fetch for `url`, using webget instance
`wg_proxy`. This is intended to allow things like validating login state in the web
get instance, and other such functionality.
A return of `True` means everything is OK, a return of `False` means the prefetch
check cannot get the WebGet instance into the required state, for whatever reason.
`check_postfetch()` is called once content has been fetched, with the associated
data and metadata for the fetch (`url, wg_proxy, fname, fcontent, fmimetype`). This is
intended to allow the module to modify the content or metadata before it is
fed through the link extraction system/saved-to-disk. It can also allow more
banal operations such as clarifying filenames.
Return value is a 3-tuple `(fname, fcontent, fmimetype)`
'''
rewalk_interval = settings.RAW_REWALK_INTERVAL_DAYS
@abc.abstractproperty
def module_name(self):
pass
@classmethod
@abc.abstractmethod
def cares_about_url(cls, url):
pass
@classmethod
def is_disabled(cls, netloc, url):
return False
@classmethod
@abc.abstractmethod
def get_start_urls(cls):
pass
@classmethod
def check_prefetch(cls, url, wg_proxy):
return True
@classmethod
def single_thread_fetch(cls, url):
return False
@classmethod
def check_postfetch(cls, url, wg_proxy, fname, fcontent, fmimetype):
return fname, fcontent, fmimetype
@classmethod
def get_netlocs(cls):
urls = cls.get_start_urls()
netlocs = [urllib.parse.urlparse(tmp).netloc for tmp in urls]
return list(set(netlocs))
@staticmethod
def get_max_active_jobs():
return 100 | fake-name/ReadableWebProxy | RawArchiver/ModuleBase.py | Python | bsd-3-clause | 2,588 |
import fipy
gmsh_text_box = '''
// Define the square that acts as the system boundary.
dx = %(dx)g;
Lx = %(Lx)g;
Ly = %(Ly)g;
p_n_w = newp; Point(p_n_w) = {-Lx / 2.0, Ly / 2.0, 0, dx};
p_n_e = newp; Point(p_n_e) = {Lx / 2.0, Ly / 2.0, 0, dx};
p_s_e = newp; Point(p_s_e) = {Lx / 2.0, -Ly / 2.0, 0, dx};
p_s_w = newp; Point(p_s_w) = {-Lx / 2.0, -Ly / 2.0, 0, dx};
l_n = newl; Line(l_n) = {p_n_w, p_n_e};
l_e = newl; Line(l_e) = {p_n_e, p_s_e};
l_s = newl; Line(l_s) = {p_s_e, p_s_w};
l_w = newl; Line(l_w) = {p_s_w, p_n_w};
ll = newll; Line Loop(ll) = {l_n, l_e, l_s, l_w};
'''
gmsh_text_circle = '''
// Define a circle that acts as an obstacle
x = %(x)g;
y = %(y)g;
R = %(R)g;
p_c = newp; Point(p_c) = {x, y, 0, dx};
p_w = newp; Point(p_w) = {x - R, y, 0, dx};
p_n = newp; Point(p_n) = {x, y + R, 0, dx};
p_e = newp; Point(p_e) = {x + R, y, 0, dx};
p_s = newp; Point(p_s) = {x, y - R, 0, dx};
c_w_n = newreg; Circle(c_w_n) = {p_w, p_c, p_n};
c_n_e = newreg; Circle(c_n_e) = {p_n, p_c, p_e};
c_e_s = newreg; Circle(c_e_s) = {p_e, p_c, p_s};
c_s_w = newreg; Circle(c_s_w) = {p_s, p_c, p_w};
Line Loop(%(i)d) = {c_w_n, c_n_e, c_e_s, c_s_w};
'''
gmsh_text_surface = '''
// The first argument is the outer loop boundary.
// The remainder are holes in it.
Plane Surface(1) = {ll, %(args)s};
'''
def _porous_mesh_geo_factory(rs, R, dx, L):
gmsh_text = gmsh_text_box % {'dx': dx, 'Lx': L[0], 'Ly': L[1]}
circle_loop_indexes = []
if rs is not None and len(rs) and R:
for i in range(len(rs)):
index = 10 * (i + 1)
gmsh_text += gmsh_text_circle % {'x': rs[i][0], 'y': rs[i][1],
'R': R, 'i': index}
circle_loop_indexes += [index]
surface_args = ', '.join([str(i) for i in circle_loop_indexes])
gmsh_text += gmsh_text_surface % {'args': surface_args}
return gmsh_text
def uniform_mesh_factory(L, dx):
dim = len(L)
if dim == 1:
return fipy.Grid1D(dx=dx, Lx=L[0]) - L[0] / 2.0
elif dim == 2:
return (fipy.Grid2D(dx=dx, dy=dx, Lx=L[0], Ly=L[1]) -
((L[0] / 2.0,), (L[1] / 2.0,)))
def porous_mesh_factory(rs, R, dx, L):
return fipy.Gmsh2D(_porous_mesh_geo_factory(rs, R, dx, L))
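# Illustrative usage (a sketch; the porous variant invokes gmsh, and the
# values below are placeholders):
#
# import numpy as np
# m_uni = uniform_mesh_factory(L=(40.0, 40.0), dx=1.0)
# m_por = porous_mesh_factory(rs=np.array([[-10.0, 0.0], [10.0, 5.0]]),
#                             R=4.0, dx=1.0, L=(40.0, 40.0))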
| eddiejessup/ahoy | ahoy/mesh.py | Python | bsd-3-clause | 2,234 |
from bokeh.io import save
from bokeh.layouts import row
from bokeh.plotting import figure
def make_figure(output_backend):
p = figure(plot_width=400,
plot_height=400,
output_backend=output_backend,
title="Backend: %s" % output_backend)
p.circle(x=[1, 2, 3], y=[1, 2, 3], radius=0.25, color="blue", alpha=0.5)
p.annulus(x=[1, 2, 3], y=[1, 2, 3], inner_radius=0.1, outer_radius=0.20, color="orange")
return p
canvas = make_figure("canvas")
webgl = make_figure("webgl")
svg = make_figure("svg")
save(row(canvas, webgl, svg))
| ericmjl/bokeh | examples/integration/glyphs/frame_clipping_multi_backend.py | Python | bsd-3-clause | 593 |
# Copyright (c) 2014 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This script is intended for use as a GYP_GENERATOR. It takes as input (by way of
the generator flag config_path) the path of a json file that dictates the files
and targets to search for. The following keys are supported:
files: list of paths (relative) of the files to search for.
targets: list of targets to search for. The target names are unqualified.
The following is output:
error: only supplied if there is an error.
targets: the set of targets passed in via targets that either directly or
indirectly depend upon the set of paths supplied in files.
build_targets: minimal set of targets that directly depend on the changed
files and need to be built. The expectation is this set of targets is passed
into a build step.
status: outputs one of three values: none of the supplied files were found,
one of the include files changed so that it should be assumed everything
changed (in this case targets and build_targets are not output) or at
least one file was found.
invalid_targets: list of supplied targets that were not found.
If the generator flag analyzer_output_path is specified, output is written
there. Otherwise output is written to stdout.
"""
import gyp.common
import gyp.ninja_syntax as ninja_syntax
import json
import os
import posixpath
import sys
debug = False
found_dependency_string = 'Found dependency'
no_dependency_string = 'No dependencies'
# Status when it should be assumed that everything has changed.
all_changed_string = 'Found dependency (all)'
# MatchStatus is used indicate if and how a target depends upon the supplied
# sources.
# The target's sources contain one of the supplied paths.
MATCH_STATUS_MATCHES = 1
# The target has a dependency on another target that contains one of the
# supplied paths.
MATCH_STATUS_MATCHES_BY_DEPENDENCY = 2
# The target's sources weren't in the supplied paths and none of the target's
# dependencies depend upon a target that matched.
MATCH_STATUS_DOESNT_MATCH = 3
# The target doesn't contain the source, but the dependent targets have not yet
# been visited to determine a more specific status yet.
MATCH_STATUS_TBD = 4
generator_supports_multiple_toolsets = gyp.common.CrossCompileRequested()
generator_wants_static_library_dependencies_adjusted = False
generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR',
'LIB_DIR', 'SHARED_LIB_DIR']:
generator_default_variables[dirname] = '!!!'
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
'CONFIGURATION_NAME']:
generator_default_variables[unused] = ''
def _ToGypPath(path):
"""Converts a path to the format used by gyp."""
if os.sep == '\\' and os.altsep == '/':
return path.replace('\\', '/')
return path
def _ResolveParent(path, base_path_components):
"""Resolves |path|, which starts with at least one '../'. Returns an empty
string if the path shouldn't be considered. See _AddSources() for a
description of |base_path_components|."""
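  # For example (illustrative): with base_path_components == ['foo', 'bar'],
  # _ResolveParent('../baz.cc', ['foo', 'bar']) returns 'foo/baz.cc', while a
  # path with more '../' segments than components resolves to '' (ignored).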
depth = 0
while path.startswith('../'):
depth += 1
path = path[3:]
# Relative includes may go outside the source tree. For example, an action may
# have inputs in /usr/include, which are not in the source tree.
if depth > len(base_path_components):
return ''
if depth == len(base_path_components):
return path
return '/'.join(base_path_components[0:len(base_path_components) - depth]) + \
'/' + path
def _AddSources(sources, base_path, base_path_components, result):
"""Extracts valid sources from |sources| and adds them to |result|. Each
source file is relative to |base_path|, but may contain '..'. To make
resolving '..' easier |base_path_components| contains each of the
directories in |base_path|. Additionally each source may contain variables.
Such sources are ignored as it is assumed dependencies on them are expressed
and tracked in some other means."""
# NOTE: gyp paths are always posix style.
for source in sources:
if not len(source) or source.startswith('!!!') or source.startswith('$'):
continue
# variable expansion may lead to //.
org_source = source
source = source[0] + source[1:].replace('//', '/')
if source.startswith('../'):
source = _ResolveParent(source, base_path_components)
if len(source):
result.append(source)
continue
result.append(base_path + source)
if debug:
print 'AddSource', org_source, result[len(result) - 1]
def _ExtractSourcesFromAction(action, base_path, base_path_components,
results):
if 'inputs' in action:
_AddSources(action['inputs'], base_path, base_path_components, results)
def _ToLocalPath(toplevel_dir, path):
"""Converts |path| to a path relative to |toplevel_dir|."""
if path == toplevel_dir:
return ''
if path.startswith(toplevel_dir + '/'):
return path[len(toplevel_dir) + len('/'):]
return path
def _ExtractSources(target, target_dict, toplevel_dir):
# |target| is either absolute or relative and in the format of the OS. Gyp
# source paths are always posix. Convert |target| to a posix path relative to
# |toplevel_dir_|. This is done to make it easy to build source paths.
base_path = posixpath.dirname(_ToLocalPath(toplevel_dir, _ToGypPath(target)))
base_path_components = base_path.split('/')
# Add a trailing '/' so that _AddSources() can easily build paths.
if len(base_path):
base_path += '/'
if debug:
print 'ExtractSources', target, base_path
results = []
if 'sources' in target_dict:
_AddSources(target_dict['sources'], base_path, base_path_components,
results)
# Include the inputs from any actions. Any changes to these affect the
# resulting output.
if 'actions' in target_dict:
for action in target_dict['actions']:
_ExtractSourcesFromAction(action, base_path, base_path_components,
results)
if 'rules' in target_dict:
for rule in target_dict['rules']:
_ExtractSourcesFromAction(rule, base_path, base_path_components, results)
return results
class Target(object):
"""Holds information about a particular target:
deps: set of Targets this Target depends upon. This is not recursive, only the
direct dependent Targets.
match_status: one of the MatchStatus values.
back_deps: set of Targets that have a dependency on this Target.
visited: used during iteration to indicate whether we've visited this target.
This is used for two iterations, once in building the set of Targets and
again in _GetBuildTargets().
name: fully qualified name of the target.
requires_build: True if the target type is such that it needs to be built.
See _DoesTargetTypeRequireBuild for details.
added_to_compile_targets: used when determining if the target was added to the
set of targets that needs to be built.
in_roots: true if this target is a descendant of one of the root nodes.
is_executable: true if the type of target is executable.
is_static_library: true if the type of target is static_library.
is_or_has_linked_ancestor: true if the target does a link (eg executable), or
if there is a target in back_deps that does a link."""
def __init__(self, name):
self.deps = set()
self.match_status = MATCH_STATUS_TBD
self.back_deps = set()
self.name = name
# TODO(sky): I don't like hanging this off Target. This state is specific
# to certain functions and should be isolated there.
self.visited = False
self.requires_build = False
self.added_to_compile_targets = False
self.in_roots = False
self.is_executable = False
self.is_static_library = False
self.is_or_has_linked_ancestor = False
class Config(object):
"""Details what we're looking for
files: set of files to search for
targets: see file description for details."""
def __init__(self):
self.files = []
self.targets = set()
def Init(self, params):
"""Initializes Config. This is a separate method as it raises an exception
if there is a parse error."""
generator_flags = params.get('generator_flags', {})
config_path = generator_flags.get('config_path', None)
if not config_path:
return
try:
f = open(config_path, 'r')
config = json.load(f)
f.close()
except IOError:
raise Exception('Unable to open file ' + config_path)
except ValueError as e:
raise Exception('Unable to parse config file ' + config_path + str(e))
if not isinstance(config, dict):
raise Exception('config_path must be a JSON file containing a dictionary')
self.files = config.get('files', [])
self.targets = set(config.get('targets', []))
def _WasBuildFileModified(build_file, data, files, toplevel_dir):
"""Returns true if the build file |build_file| is either in |files| or
one of the files included by |build_file| is in |files|. |toplevel_dir| is
the root of the source tree."""
if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
if debug:
print 'gyp file modified', build_file
return True
# First element of included_files is the file itself.
if len(data[build_file]['included_files']) <= 1:
return False
for include_file in data[build_file]['included_files'][1:]:
# |included_files| are relative to the directory of the |build_file|.
rel_include_file = \
_ToGypPath(gyp.common.UnrelativePath(include_file, build_file))
if _ToLocalPath(toplevel_dir, rel_include_file) in files:
if debug:
print 'included gyp file modified, gyp_file=', build_file, \
'included file=', rel_include_file
return True
return False
def _GetOrCreateTargetByName(targets, target_name):
"""Creates or returns the Target at targets[target_name]. If there is no
Target for |target_name| one is created. Returns a tuple of whether a new
Target was created and the Target."""
if target_name in targets:
return False, targets[target_name]
target = Target(target_name)
targets[target_name] = target
return True, target
def _DoesTargetTypeRequireBuild(target_dict):
"""Returns true if the target type is such that it needs to be built."""
# If a 'none' target has rules or actions we assume it requires a build.
return target_dict['type'] != 'none' or \
target_dict.get('actions') or target_dict.get('rules')
def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files,
build_files):
"""Returns a tuple of the following:
. A dictionary mapping from fully qualified name to Target.
. A list of the targets that have a source file in |files|.
. Set of root Targets reachable from the files |build_files|.
This sets the |match_status| of the targets that contain any of the source
files in |files| to MATCH_STATUS_MATCHES.
|toplevel_dir| is the root of the source tree."""
# Maps from target name to Target.
targets = {}
# Targets that matched.
matching_targets = []
# Queue of targets to visit.
targets_to_visit = target_list[:]
# Maps from build file to a boolean indicating whether the build file is in
# |files|.
build_file_in_files = {}
# Root targets across all files.
roots = set()
# Set of Targets in |build_files|.
build_file_targets = set()
while len(targets_to_visit) > 0:
target_name = targets_to_visit.pop()
created_target, target = _GetOrCreateTargetByName(targets, target_name)
if created_target:
roots.add(target)
elif target.visited:
continue
target.visited = True
target.requires_build = _DoesTargetTypeRequireBuild(
target_dicts[target_name])
target_type = target_dicts[target_name]['type']
target.is_executable = target_type == 'executable'
target.is_static_library = target_type == 'static_library'
target.is_or_has_linked_ancestor = (target_type == 'executable' or
target_type == 'shared_library')
build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
if not build_file in build_file_in_files:
build_file_in_files[build_file] = \
_WasBuildFileModified(build_file, data, files, toplevel_dir)
if build_file in build_files:
build_file_targets.add(target)
# If a build file (or any of its included files) is modified we assume all
# targets in the file are modified.
if build_file_in_files[build_file]:
print 'matching target from modified build file', target_name
target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target)
else:
sources = _ExtractSources(target_name, target_dicts[target_name],
toplevel_dir)
for source in sources:
if source in files:
print 'target', target_name, 'matches', source
target.match_status = MATCH_STATUS_MATCHES
matching_targets.append(target)
break
# Add dependencies to visit as well as updating back pointers for deps.
for dep in target_dicts[target_name].get('dependencies', []):
targets_to_visit.append(dep)
created_dep_target, dep_target = _GetOrCreateTargetByName(targets, dep)
if not created_dep_target:
roots.discard(dep_target)
target.deps.add(dep_target)
dep_target.back_deps.add(target)
return targets, matching_targets, roots & build_file_targets
def _GetUnqualifiedToTargetMapping(all_targets, to_find):
"""Returns a mapping (dictionary) from unqualified name to Target for all the
Targets in |to_find|."""
result = {}
if not to_find:
return result
to_find = set(to_find)
for target_name in all_targets.keys():
extracted = gyp.common.ParseQualifiedTarget(target_name)
if len(extracted) > 1 and extracted[1] in to_find:
to_find.remove(extracted[1])
result[extracted[1]] = all_targets[target_name]
if not to_find:
return result
return result
def _DoesTargetDependOn(target):
"""Returns true if |target| or any of its dependencies matches the supplied
set of paths. This updates |match_status| of the Targets as it recurses.
target: the Target to look for."""
if target.match_status == MATCH_STATUS_DOESNT_MATCH:
return False
if target.match_status == MATCH_STATUS_MATCHES or \
target.match_status == MATCH_STATUS_MATCHES_BY_DEPENDENCY:
return True
for dep in target.deps:
if _DoesTargetDependOn(dep):
target.match_status = MATCH_STATUS_MATCHES_BY_DEPENDENCY
return True
target.match_status = MATCH_STATUS_DOESNT_MATCH
return False
def _GetTargetsDependingOn(possible_targets):
"""Returns the list of Targets in |possible_targets| that depend (either
directly or indirectly) on the matched targets.
possible_targets: targets to search from."""
found = []
for target in possible_targets:
if _DoesTargetDependOn(target):
found.append(target)
return found
def _AddBuildTargets(target, roots, add_if_no_ancestor, result):
"""Recurses through all targets that depend on |target|, adding all targets
that need to be built (and are in |roots|) to |result|.
roots: set of root targets.
add_if_no_ancestor: If true and there are no ancestors of |target| then add
|target| to |result|. |target| must still be in |roots|.
result: targets that need to be built are added here."""
if target.visited:
return
target.visited = True
target.in_roots = not target.back_deps and target in roots
for back_dep_target in target.back_deps:
_AddBuildTargets(back_dep_target, roots, False, result)
target.added_to_compile_targets |= back_dep_target.added_to_compile_targets
target.in_roots |= back_dep_target.in_roots
target.is_or_has_linked_ancestor |= (
back_dep_target.is_or_has_linked_ancestor)
# Always add 'executable' targets. Even though they may be built by other
# targets that depend upon them it makes detection of what is going to be
# built easier.
# And always add static_libraries that have no dependencies on them from
# linkables. This is necessary as the other dependencies on them may be
# static libraries themselves, which are not compile time dependencies.
if target.in_roots and \
(target.is_executable or
(not target.added_to_compile_targets and
(add_if_no_ancestor or target.requires_build)) or
(target.is_static_library and add_if_no_ancestor and
not target.is_or_has_linked_ancestor)):
result.add(target)
target.added_to_compile_targets = True
def _GetBuildTargets(matching_targets, roots):
"""Returns the set of Targets that require a build.
matching_targets: targets that changed and need to be built.
roots: set of root targets in the build files to search from."""
result = set()
for target in matching_targets:
_AddBuildTargets(target, roots, True, result)
return result
def _WriteOutput(params, **values):
"""Writes the output, either to stdout or a file is specified."""
if 'error' in values:
print 'Error:', values['error']
if 'status' in values:
print values['status']
if 'targets' in values:
values['targets'].sort()
print 'Supplied targets that depend on changed files:'
for target in values['targets']:
print '\t', target
if 'invalid_targets' in values:
values['invalid_targets'].sort()
print 'The following targets were not found:'
for target in values['invalid_targets']:
print '\t', target
if 'build_targets' in values:
values['build_targets'].sort()
print 'Targets that require a build:'
for target in values['build_targets']:
print '\t', target
output_path = params.get('generator_flags', {}).get(
'analyzer_output_path', None)
if not output_path:
print json.dumps(values)
return
try:
f = open(output_path, 'w')
f.write(json.dumps(values) + '\n')
f.close()
except IOError as e:
print 'Error writing to output file', output_path, str(e)
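# Illustrative shape of the JSON written above (the status string and target
# names are hypothetical):
#   {"status": "Found dependency",
#    "targets": ["foo_tests"],
#    "build_targets": ["foo", "foo_tests"]}
# When 'analyzer_output_path' is set in generator_flags, the same JSON is
# written to that file instead of stdout.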
def _WasGypIncludeFileModified(params, files):
"""Returns true if one of the files in |files| is in the set of included
files."""
if params['options'].includes:
for include in params['options'].includes:
if _ToGypPath(include) in files:
print 'Include file modified, assuming all changed', include
return True
return False
def _NamesNotIn(names, mapping):
"""Returns a list of the values in |names| that are not in |mapping|."""
return [name for name in names if name not in mapping]
def _LookupTargets(names, mapping):
"""Returns a list of the mapping[name] for each value in |names| that is in
|mapping|."""
return [mapping[name] for name in names if name in mapping]
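# Tiny illustration of the two helpers above (values hypothetical):
#   _NamesNotIn(['a', 'b'], {'a': target_a})    -> ['b']
#   _LookupTargets(['a', 'b'], {'a': target_a}) -> [target_a]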
def CalculateVariables(default_variables, params):
"""Calculate additional variables for use in the build (called by gyp)."""
flavor = gyp.common.GetFlavor(params)
if flavor == 'mac':
default_variables.setdefault('OS', 'mac')
elif flavor == 'win':
default_variables.setdefault('OS', 'win')
# Copy additional generator configuration data from VS, which is shared
# by the Windows Ninja generator.
import gyp.generator.msvs as msvs_generator
generator_additional_non_configuration_keys = getattr(msvs_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
else:
operating_system = flavor
if flavor == 'android':
operating_system = 'linux' # Keep this legacy behavior for now.
default_variables.setdefault('OS', operating_system)
def GenerateOutput(target_list, target_dicts, data, params):
"""Called by gyp as the final stage. Outputs results."""
config = Config()
try:
config.Init(params)
if not config.files:
raise Exception('Must specify files to analyze via config_path generator '
'flag')
toplevel_dir = _ToGypPath(os.path.abspath(params['options'].toplevel_dir))
if debug:
print 'toplevel_dir', toplevel_dir
if _WasGypIncludeFileModified(params, config.files):
result_dict = { 'status': all_changed_string,
'targets': list(config.targets) }
_WriteOutput(params, **result_dict)
return
all_targets, matching_targets, roots = _GenerateTargets(
data, target_list, target_dicts, toplevel_dir, frozenset(config.files),
params['build_files'])
unqualified_mapping = _GetUnqualifiedToTargetMapping(all_targets,
config.targets)
invalid_targets = None
if len(unqualified_mapping) != len(config.targets):
invalid_targets = _NamesNotIn(config.targets, unqualified_mapping)
if matching_targets:
search_targets = _LookupTargets(config.targets, unqualified_mapping)
matched_search_targets = _GetTargetsDependingOn(search_targets)
# Reset the visited status for _GetBuildTargets.
for target in all_targets.itervalues():
target.visited = False
build_targets = _GetBuildTargets(matching_targets, roots)
matched_search_targets = [gyp.common.ParseQualifiedTarget(target.name)[1]
for target in matched_search_targets]
build_targets = [gyp.common.ParseQualifiedTarget(target.name)[1]
for target in build_targets]
else:
matched_search_targets = []
build_targets = []
result_dict = { 'targets': matched_search_targets,
'status': found_dependency_string if matching_targets else
no_dependency_string,
'build_targets': build_targets}
if invalid_targets:
result_dict['invalid_targets'] = invalid_targets
_WriteOutput(params, **result_dict)
except Exception as e:
_WriteOutput(params, error=str(e))
| ryfx/gyp | pylib/gyp/generator/analyzer.py | Python | bsd-3-clause | 22,381 |
from datetime import datetime
from django.db import transaction
from django.http import Http404
from django.utils.translation import gettext_lazy as _
from corehq import toggles
from corehq.apps.registry.models import DataRegistry, RegistryInvitation, RegistryGrant, RegistryAuditLog
from corehq.apps.registry.signals import (
data_registry_activated,
data_registry_deactivated,
data_registry_schema_changed,
data_registry_invitation_created,
data_registry_invitation_removed,
data_registry_invitation_accepted,
data_registry_invitation_rejected,
data_registry_grant_created,
data_registry_grant_removed,
data_registry_deleted,
)
from corehq.apps.users.decorators import require_permission_raw
from corehq.apps.users.models import Permissions
def _get_registry_or_404(domain, registry_slug):
try:
return DataRegistry.objects.visible_to_domain(domain).get(slug=registry_slug)
except DataRegistry.DoesNotExist:
raise Http404
class RegistryPermissionCheck:
def __init__(self, domain, couch_user):
self.domain = domain
self.couch_user = couch_user
role = couch_user.get_role(domain, allow_enterprise=True)
self._permissions = role.permissions if role else Permissions()
self.manageable_slugs = set(self._permissions.manage_data_registry_list)
self.can_manage_all = self._permissions.manage_data_registry
self.can_manage_some = self.can_manage_all or bool(self.manageable_slugs)
def can_manage_registry(self, slug):
return self.can_manage_all or slug in self.manageable_slugs
def can_view_registry_data(self, slug):
return (
self._permissions.view_data_registry_contents
or slug in self._permissions.view_data_registry_contents_list
)
@staticmethod
def user_can_manage_some(couch_user, domain):
return RegistryPermissionCheck(domain, couch_user).can_manage_some
@staticmethod
def user_can_manage_all(couch_user, domain):
return RegistryPermissionCheck(domain, couch_user).can_manage_all
def can_view_some_data_registry_contents(self):
        return (self._permissions.view_data_registry_contents
                or bool(self._permissions.view_data_registry_contents_list))
manage_some_registries_required = require_permission_raw(RegistryPermissionCheck.user_can_manage_some)
manage_all_registries_required = require_permission_raw(RegistryPermissionCheck.user_can_manage_all)
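# Usage sketch (the view function and its arguments are hypothetical): the
# decorators above guard domain views, and finer-grained checks can be made
# inside the view body.
#
#   @manage_some_registries_required
#   def edit_registry_view(request, domain, registry_slug):
#       checker = RegistryPermissionCheck(domain, request.couch_user)
#       if not checker.can_manage_registry(registry_slug):
#           raise Http404
#       ...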
class DataRegistryCrudHelper:
def __init__(self, domain, registry_slug, request_user):
self.domain = domain
self.registry = _get_registry_or_404(domain, registry_slug)
self.user = request_user
def check_permission(self, couch_user):
return RegistryPermissionCheck(self.domain, couch_user).can_manage_registry(self.registry.slug)
def set_attr(self, attr, value):
setattr(self.registry, attr, value)
self.registry.save()
def set_active_state(self, is_active):
if is_active:
self.activate()
else:
self.deactivate()
def activate(self):
if not self.registry.is_active:
self.registry.activate(self.user)
data_registry_activated.send(sender=DataRegistry, registry=self.registry)
def deactivate(self):
if self.registry.is_active:
self.registry.deactivate(self.user)
data_registry_deactivated.send(sender=DataRegistry, registry=self.registry)
@transaction.atomic
def update_schema(self, schema):
if schema != self.registry.schema:
old_schema = self.registry.schema
self.registry.schema = schema
self.registry.save()
self.registry.logger.schema_changed(self.user, schema, old_schema)
data_registry_schema_changed.send(
sender=DataRegistry, registry=self.registry, new_schema=schema, old_schema=old_schema
)
@transaction.atomic
def get_or_create_invitation(self, domain):
from corehq.apps.domain.models import Domain
# TODO: check that domain is part of the same account
domain_obj = Domain.get_by_name(domain)
if not domain_obj:
raise ValueError(f"Domain not found: {domain}")
invitation, created = self.registry.invitations.get_or_create(domain=domain)
if created:
self.registry.logger.invitation_added(self.user, invitation)
data_registry_invitation_created.send(sender=DataRegistry, registry=self.registry, invitation=invitation)
toggles.DATA_REGISTRY.set(domain, True, namespace=toggles.NAMESPACE_DOMAIN)
return invitation, created
@transaction.atomic
def remove_invitation(self, domain, invitation_id):
try:
invitation = self.registry.invitations.get(id=invitation_id)
except RegistryInvitation.DoesNotExist:
raise Http404
if invitation.domain != domain:
raise ValueError()
invitation.delete()
self.registry.logger.invitation_removed(self.user, invitation_id, invitation)
data_registry_invitation_removed.send(sender=DataRegistry, registry=self.registry, invitation=invitation)
@transaction.atomic
def get_or_create_grant(self, from_domain, to_domains):
available_domains = set(self.registry.invitations.values_list("domain", flat=True))
not_invited = set(to_domains) - available_domains
if not_invited:
raise ValueError(_("Domains must be invited before grants can be created: {not_invited}").format(
not_invited=not_invited
))
grant, created = self.registry.grants.get_or_create(from_domain=from_domain, to_domains=to_domains)
if created:
self.registry.logger.grant_added(self.user, grant)
data_registry_grant_created.send(
sender=DataRegistry, registry=self.registry, from_domain=from_domain, to_domains=to_domains
)
return grant, created
@transaction.atomic
def remove_grant(self, from_domain, grant_id):
try:
grant = self.registry.grants.get(from_domain=from_domain, id=grant_id)
except RegistryGrant.DoesNotExist:
raise Http404
assert grant.registry_id == self.registry.id
grant.delete()
self.registry.logger.grant_removed(self.user, grant_id, grant)
data_registry_grant_removed.send(
sender=DataRegistry, registry=self.registry, from_domain=from_domain, to_domains=grant.to_domains
)
return grant
def accept_invitation(self, domain):
try:
invitation = self.registry.invitations.get(domain=domain)
except RegistryInvitation.DoesNotExist:
raise Http404
if not invitation.is_accepted:
previous_status = invitation.status
invitation.accept(self.user)
data_registry_invitation_accepted.send(
sender=DataRegistry, registry=self.registry, invitation=invitation, previous_status=previous_status
)
return invitation
def reject_invitation(self, domain):
try:
invitation = self.registry.invitations.get(domain=domain)
except RegistryInvitation.DoesNotExist:
raise Http404
if not invitation.is_rejected:
previous_status = invitation.status
invitation.reject(self.user)
data_registry_invitation_rejected.send(
sender=DataRegistry, registry=self.registry, invitation=invitation, previous_status=previous_status
)
return invitation
@transaction.atomic
def delete_registry(self):
# TODO: figure out what to do here
self.registry.delete()
data_registry_deleted.send(sender=DataRegistry, registry=self.registry)
class DataRegistryAuditViewHelper:
def __init__(self, domain, registry_slug):
self.domain = domain
self.registry = _get_registry_or_404(domain, registry_slug)
self.is_owner = domain == self.registry.domain
self.filter_kwargs = {}
def filter(self, domain, start_date, end_date, action):
if domain:
self.filter_kwargs["domain"] = domain
if start_date:
self.filter_kwargs["date__gte"] = start_date
if end_date:
self.filter_kwargs["date__lte"] = datetime.combine(end_date, datetime.max.time())
if action:
self.filter_kwargs["action"] = action
@property
def query(self):
query = self.registry.audit_logs.select_related("user")
if not self.is_owner:
self.filter_kwargs["domain"] = self.domain
return query.filter(**self.filter_kwargs)
def get_logs(self, skip, limit):
return [log.to_json() for log in self.query[skip:skip + limit]]
def get_total(self):
return self.query.count()
@staticmethod
def action_options(is_owner):
options = RegistryAuditLog.ACTION_CHOICES if is_owner else RegistryAuditLog.NON_OWNER_ACTION_CHOICES
return [
{"id": option[0], "text": option[1]}
for option in options
]
def get_data_registry_dropdown_options(domain, required_case_types=None, permission_checker=None):
registries = DataRegistry.objects.visible_to_domain(domain)
if permission_checker:
registries = [registry for registry in registries
if permission_checker.can_view_registry_data(registry.slug)]
return [
{"slug": registry.slug, "name": registry.name}
for registry in registries
if not required_case_types or set(registry.wrapped_schema.case_types) & required_case_types
]
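# Illustrative return value (slugs and names are hypothetical):
#   [{"slug": "patients", "name": "Patient Registry"},
#    {"slug": "cases", "name": "Case Registry"}]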
| dimagi/commcare-hq | corehq/apps/registry/utils.py | Python | bsd-3-clause | 9,875 |
from distutils.core import setup
setup(name='scrapy-elasticsearch-bulk-item-exporter',
version='0.1',
license='Apache License, Version 2.0',
      description="An extension of Scrapy's JsonLinesItemExporter that exports to the Elasticsearch bulk format.",
author='Florian Gilcher',
author_email='[email protected]',
url='http://github.com/asquera/scrapy-elasticsearch-bulk-item-exporter',
keywords="scrapy elastic search",
py_modules=['scrapyelasticsearch'],
platforms = ['Any'],
install_requires = ['scrapy'],
classifiers = [ 'Development Status :: 4 - Beta',
'Environment :: No Input/Output (Daemon)',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python']
)
| skade/scrapy-elasticsearch-bulk-item-exporter | setup.py | Python | bsd-3-clause | 872 |
## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# fetch values from package.xml
setup_args = generate_distutils_setup(
packages=['bwi_local'],
package_dir={'': 'src'})
setup(**setup_args)
| utexas-bwi/bwi_lab | bwi_local/setup.py | Python | bsd-3-clause | 307 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 12, transform = "RelativeDifference", sigma = 0.0, exog_count = 100, ar_order = 0);
| antoinecarme/pyaf | tests/artificial/transf_RelativeDifference/trend_MovingAverage/cycle_12/ar_/test_artificial_1024_RelativeDifference_MovingAverage_12__100.py | Python | bsd-3-clause | 280 |
def iter_ngram(seq, max_order, min_order=None, sent_start=None, sent_end=None):
    # Default min_order before validating, so the comparison below is always
    # between two ints.
    if min_order is None:
        min_order = max_order
    if min_order > max_order:
        raise ValueError("min_order > max_order (%d > %d)" % (min_order, max_order))
orders = range(min_order, max_order+1)
it = iter(seq)
if sent_start is not None:
buffer = [sent_start]*max_order
else:
buffer = []
last_countdown = None
while True:
if last_countdown is None:
try:
item = it.next()
except StopIteration:
if sent_end is None:
break
else:
last_countdown = max_order - 1
item = sent_end
else:
if last_countdown <= 1:
break
item = sent_end
last_countdown -= 1
buffer.append(item)
del buffer[:-max_order]
for n in orders:
if len(buffer) < n:
continue
yield buffer[-n:]
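# Usage sketch for iter_ngram (tokens are illustrative): with max_order=2 and
# sentence markers, the start is padded and the end marker closes the final
# bigram.
#
#   >>> list(iter_ngram(["the", "cat", "sat"], 2,
#   ...                 sent_start="<s>", sent_end="</s>"))
#   [['<s>', 'the'], ['the', 'cat'], ['cat', 'sat'], ['sat', '</s>']]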
def iter_ngram_pad(seq, max_order, min_order=None, sent_start=None, sent_end=None, padding=[]):
if len(padding) < max_order-1:
raise ValueError("padding must have at least %d items" % (max_order-1))
offset = len(padding)-max_order
for ngram in iter_ngram(seq, max_order, min_order, sent_start, sent_end):
n = len(ngram)
yield ngram+padding[offset+n:]
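# Padding sketch for iter_ngram_pad (values illustrative): with max_order=3,
# min_order=1 and padding=['<pad>', '<pad>'], every yielded ngram is
# right-padded to length 3, e.g. the unigram ['the'] is yielded as
# ['the', '<pad>', '<pad>'].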
| honzas83/kitchen | kitchen/utils.py | Python | bsd-3-clause | 1,451 |
import unittest
import numpy
import scipy.sparse
from pylearn2.testing.skip import skip_if_no_data
import pylearn2.datasets.utlc as utlc
def test_ule():
skip_if_no_data()
# Test loading of transfer data
train, valid, test, transfer = utlc.load_ndarray_dataset("ule", normalize=True, transfer=True)
assert train.shape[0]==transfer.shape[0]
#@unittest.skip("Slow and needs >8 GB of RAM")
def test_all_utlc():
skip_if_no_data()
    for name in ['avicenna','harry','ule']: # not testing rita, because it requires a lot of memory and is slow
print "Loading ", name
train, valid, test = utlc.load_ndarray_dataset(name, normalize=True)
print "dtype, max, min, mean, std"
print train.dtype, train.max(), train.min(), train.mean(), train.std()
assert isinstance(train, numpy.ndarray), "train is not an ndarray in %s dataset" % name
assert isinstance(valid, numpy.ndarray), "valid is not an ndarray in %s dataset" % name
assert isinstance(test, numpy.ndarray), "test is not an ndarray in %s dataset" % name
        assert train.shape[1]==test.shape[1]==valid.shape[1], "shapes of datasets do not match for %s" % name
def test_sparse_ule():
skip_if_no_data()
# Test loading of transfer data
train, valid, test, transfer = utlc.load_sparse_dataset("ule", normalize=True, transfer=True)
assert train.shape[0]==transfer.shape[0]
def test_all_sparse_utlc():
skip_if_no_data()
for name in ['harry','terry','ule']:
print "Loading sparse ", name
train, valid, test = utlc.load_sparse_dataset(name, normalize=True)
nb_elem = numpy.prod(train.shape)
mi = train.data.min()
ma = train.data.max()
mi = min(0, mi)
ma = max(0, ma)
su = train.data.sum()
mean = float(su)/nb_elem
print name,"dtype, max, min, mean, nb non-zero, nb element, %sparse"
print train.dtype, ma, mi, mean, train.nnz, nb_elem, (nb_elem-float(train.nnz))/nb_elem
print name,"max, min, mean, std (all stats on non-zero element)"
print train.data.max(), train.data.min(), train.data.mean(), train.data.std()
assert scipy.sparse.issparse(train), "train is not sparse for %s dataset" % name
assert scipy.sparse.issparse(valid), "valid is not sparse for %s dataset" % name
assert scipy.sparse.issparse(test), "test is not sparse for %s dataset" % name
assert train.shape[1]==test.shape[1]==valid.shape[1], "shapes of sparse %s dataset do not match" % name
| KennethPierce/pylearnk | pylearn2/datasets/tests/test_utlc.py | Python | bsd-3-clause | 2,547 |
#-*- coding: utf-8 -*-
from nose.tools import raises
from pybonita import BonitaServer
from pybonita.tests import TestWithMockedServer, build_dumb_bonita_error_body,\
build_bonita_process_definition_xml
from pybonita.process import BonitaProcess
class TestGetProcess(TestWithMockedServer):
@classmethod
def setUpClass(cls):
pass
@classmethod
def tearDownClass(cls):
pass
def test_not_found_process(self):
""" Retrieve not existing process """
# Setup the response for MockServer
BonitaServer.use('localhost', 9090, 'restuser', 'restbpm')
url = '/queryDefinitionAPI/getProcess/MonProcessus1--1.0'
code = 500
xml = build_dumb_bonita_error_body('ProcessNotFoundException',message='Bonita Error: bai_QDAPII_5\nCan\'t find a process with uuid MonProcessus1--1.0')
BonitaServer.set_response_list([[url,code,xml]])
process = BonitaProcess.get('MonProcessus1--1.0')
assert process == None
def test_get_process(self):
""" Retrieve a process """
# Setup the response for MockServer
BonitaServer.use('localhost', 9090, 'restuser', 'restbpm')
url = u'/queryDefinitionAPI/getProcess/MonProcessus1--1.0'
code = 200
xml = build_bonita_process_definition_xml(uuid=u'MonProcessus1--1.0', name=u'MonProcessus1', version=u'1.0')
BonitaServer.set_response_list([[url,code,xml]])
process = BonitaProcess.get(u'MonProcessus1--1.0')
assert process != None
assert isinstance(process,BonitaProcess)
assert process.uuid == u'MonProcessus1--1.0'
assert process.name == u'MonProcessus1'
        assert process.version == u'1.0'
| julozi/pybonita | pybonita/tests/unit/process/test_process.py | Python | bsd-3-clause | 1722 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# =============================================================================
# DOCS
# =============================================================================
"""Integration with Jupyter notebook
"""
# =============================================================================
# FUNCTIONS
# =============================================================================
def load_ipython_extension(ipython):
from corral import core
core.setup_environment()
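# Usage sketch: in an IPython/Jupyter session the extension is loaded with the
# %load_ext magic (the exact module path depends on how corral is installed):
#
#   In [1]: %load_ext corral.libs.notebook_extension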
| toros-astro/corral | corral/libs/notebook_extension.py | Python | bsd-3-clause | 526 |
"""
Usage:
# Instantiate with API host, username, and password:
>>> gc = GoodsCloudAPIClient(host="http://app.goodscloud.com", user="[email protected]", pwd="mypass")
# Then, do requests as follows:
>>> orders = gc.get(
>>>     "/api/internal/order",
>>>     q=dict(filters=[dict(name="channel_id", op="eq", val=16)]), results_per_page=20, page=1)
200 OK
>>> first_id = orders.json()['objects'][0]['id']
>>> gc.patch("/api/internal/order/{}".format(first_id),
>>>     dict(pay_later=True))
200 OK
>>> gc.delete(url="/api/internal/order/{}".format(first_id))
204 NO CONTENT
# Instantiating a GoodsCloudAPIClient with debug=True provides output to debug
# authentication and request composition issues with partners.
"""
from base64 import b64encode
from hashlib import sha1, md5
import hmac
import json
import logging
import sys
import time
from functools import wraps
try:
from urllib import urlencode, unquote_plus
except ImportError:
from urllib.parse import urlencode, unquote_plus
import requests
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
logger.setLevel(logging.INFO)
def request_wrapper(fn):
"""Function decorator executing common tasks for each request:
* prepend path with /api/
* make request
* print response status code and reason
"""
@wraps(fn)
def wrap_request(self, path, *args, **kwargs):
assert path.startswith(("/api/internal", "/api/external")), (
"The provided URL path must start with `/api/internal` or `/api/external`."
)
resp = fn(self, path, *args, **kwargs)
logger.info('{} {}'.format(resp.status_code, resp.reason))
return resp
return wrap_request
class GoodsCloudAPIClient(object):
def __init__(self, host, user, pwd, version='current', debug=False, aws_credentials=False):
self.host = host # Example: `https://app.goodscloud.com`
self.user = user
self.pwd = pwd
self.session = self.login(self.user, self.pwd, aws_credentials)
self.auth = self.session['auth']
self.headers = {'Accept': 'application/json; version=%s' % (version,)}
self.debug = debug
if self.debug is True:
sys.settrace(debug_trace)
def login(self, email, password, aws_credentials):
headers = {"GC-Email": email, "GC-Password": password, "GC-AWS": aws_credentials}
resp = requests.post(
self.host + '/session',
headers=headers,
verify=False,
)
try:
session = resp.json()
except ValueError as exc:
logger.critical(resp.request.url)
logger.critical(resp.text)
raise exc
assert session['email'] == email, "Login failed on {}".format(
self.host)
return session
def _create_sign_str(self, path, method, params, expires, body_data):
"""Returns the input string to be hashed."""
# Parameters are sorted, but not urlencoded, for md5 digest.
str_params = '&'.join("%s=%s" % (a, b) for a, b in sorted(params))
sign_str = '\n'.join([
method,
path,
md5(str_params.encode('utf-8')).hexdigest(),
md5(body_data or b'').hexdigest(),
self.auth['app_token'],
expires,
])
return sign_str
def _sign(self, string):
"""Calculates, then Base64-encodes HMAC for provided string."""
return b64encode(
hmac.new(
self.auth['app_secret'].encode('utf-8'),
string.encode('utf-8'),
sha1,
).digest()
).rstrip(b'=')
def _create_signed_url(self, path, method, param_dict=None, body_data=None):
"""Produces signed URL."""
expires = time.strftime('%Y-%m-%dT%H:%M:%SZ',
time.gmtime(time.time() + EXPIRES))
if param_dict is None: param_dict = dict()
param_dict['key'] = self.auth['app_key']
param_dict['token'] = self.auth['app_token']
param_dict['expires'] = expires
params = sorted([(a, b) for a, b in param_dict.items()])
sign_str = self._create_sign_str(
path, method, params, expires, body_data,
)
sign = self._sign(sign_str)
params += [('sign', sign)]
url = self.host + path + '?' + urlencode(params)
return url
def jsonify_params(self, kwargs):
"""JSON-ifies all keyword arguments of type dict."""
return {
key: json.dumps(value) if type(value) == dict else value
for (key, value) in kwargs.items()
}
def _post_patch_put(self, method, url, obj_dict, **kwargs):
"""Common steps for all methods which create or edit objects."""
# Convert provided Python dictionary object into JSON
body_data = json.dumps(obj_dict)
signed_url = self._create_signed_url(
url,
method.upper(),
self.jsonify_params(kwargs),
body_data=body_data,
)
headers = {"Content-Type": "application/json"}
headers.update(self.headers)
return getattr(requests, method)(
signed_url,
data=body_data,
headers=headers,
)
@request_wrapper
def get(self, url, **kwargs):
if ('q' in kwargs and kwargs['q'].get('filters', None)
or 'filters' in kwargs
):
assert type(kwargs['q']['filters']) == list, (
"Filters must be a list of dicts, wrapped within query parameter `q`."
)
signed_url = self._create_signed_url(url, 'GET', self.jsonify_params(kwargs))
return requests.get(signed_url, headers=self.headers)
@request_wrapper
def delete(self, url):
signed_url = self._create_signed_url(url, 'DELETE')
return requests.delete(signed_url, headers=self.headers)
@request_wrapper
def post(self, url, obj_dict, **kwargs):
return self._post_patch_put('post', url, obj_dict, **kwargs)
@request_wrapper
def put(self, url, obj_dict, **kwargs):
return self._post_patch_put('put', url, obj_dict, **kwargs)
@request_wrapper
def patch(self, url, obj_dict, **kwargs):
return self._post_patch_put('patch', url, obj_dict, **kwargs)
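# Sketch of the string signed by _create_sign_str/_sign above (values are
# illustrative; key, token and secret come from the login session):
#
#   GET
#   /api/internal/order
#   md5("expires=2015-01-01T00:08:20Z&key=<app_key>&token=<app_token>")
#   md5("")                      # empty body for GET/DELETE
#   <app_token>
#   2015-01-01T00:08:20Z
#
# The HMAC-SHA1 of this newline-joined string, keyed with app_secret and
# Base64-encoded (trailing '=' stripped), is appended as the `sign` query
# parameter.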
def main():
try:
from IPython import embed
embed(banner1='')
except ImportError:
from code import interact
interact(local={'GoodsCloudAPIClient': GoodsCloudAPIClient})
if __name__ == "__main__":
main()
# Seconds the request is valid. Useful for debugging purposes
EXPIRES = 500
def debug_trace(frame, event, arg):
"""Prints debug info. Used when GoodsCloudAPIClient.debug is set.
Exists to keep the 'functional' code free of log clutter."""
filename = frame.f_code.co_filename
fnname = frame.f_code.co_name
if filename.find("api_client.py") == -1:
return debug_trace
    if fnname == 'create_query_params' and event == 'return':
print("\n--- Query args:\n{}".format(arg))
    elif fnname == '_create_sign_str' and event == 'call':
print("\n--- App secret:\n{}".format(
frame.f_locals['self'].auth['app_secret']))
print("\n--- Signature string input parameters: ")
for (arg, val) in frame.f_locals.items():
if arg == 'self': continue
print("{}: {}".format(arg, val))
elif fnname is "_sign" and event == 'call':
print("\n--- Composed string to-be-signed:\n{}".format(
repr(frame.f_locals['string'])))
elif fnname is "_sign" and event == 'return':
print("\n--- Resulting signature:\n{}".format(arg))
    elif fnname == '_create_signed_url' and event == 'return':
url = frame.f_locals['url']
print("\n--- Resulting URL:\n{}".format(url))
print("\n--- Unquoted URL:\n{}".format(unquote_plus(url)))
return debug_trace
| goodscloud/goodscloud-python | goodscloud_api_client/client.py | Python | bsd-3-clause | 7,983 |
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from __future__ import unicode_literals
from ..minc import Dump
def test_Dump_inputs():
input_map = dict(annotations_brief=dict(argstr='-b %s',
xor=('annotations_brief', 'annotations_full'),
),
annotations_full=dict(argstr='-f %s',
xor=('annotations_brief', 'annotations_full'),
),
args=dict(argstr='%s',
),
coordinate_data=dict(argstr='-c',
xor=('coordinate_data', 'header_data'),
),
environ=dict(nohash=True,
usedefault=True,
),
header_data=dict(argstr='-h',
xor=('coordinate_data', 'header_data'),
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
input_file=dict(argstr='%s',
mandatory=True,
position=-2,
),
line_length=dict(argstr='-l %d',
usedefault=False,
),
netcdf_name=dict(argstr='-n %s',
),
out_file=dict(argstr='> %s',
genfile=True,
position=-1,
),
output_file=dict(hash_files=False,
keep_extension=False,
name_source=['input_file'],
name_template='%s_dump.txt',
position=-1,
),
precision=dict(argstr='%s',
),
terminal_output=dict(deprecated='1.0.0',
nohash=True,
),
variables=dict(argstr='-v %s',
sep=',',
),
)
inputs = Dump.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_Dump_outputs():
output_map = dict(output_file=dict(),
)
outputs = Dump.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| mick-d/nipype | nipype/interfaces/minc/tests/test_auto_Dump.py | Python | bsd-3-clause | 1,765 |
import collections
from sympy import (
Abs, E, Float, I, Integer, Max, Min, N, Poly, Pow, PurePoly, Rational,
S, Symbol, cos, exp, oo, pi, signsimp, simplify, sin, sqrt, symbols,
sympify, trigsimp, sstr)
from sympy.matrices.matrices import (ShapeError, MatrixError,
NonSquareMatrixError, DeferredVector)
from sympy.matrices import (
GramSchmidt, ImmutableMatrix, ImmutableSparseMatrix, Matrix,
SparseMatrix, casoratian, diag, eye, hessian,
matrix_multiply_elementwise, ones, randMatrix, rot_axis1, rot_axis2,
rot_axis3, wronskian, zeros)
from sympy.core.compatibility import long, iterable, u, range
from sympy.utilities.iterables import flatten, capture
from sympy.utilities.pytest import raises, XFAIL, slow, skip
from sympy.abc import x, y, z
# don't re-order this list
classes = (Matrix, SparseMatrix, ImmutableMatrix, ImmutableSparseMatrix)
def test_args():
for c, cls in enumerate(classes):
m = cls.zeros(3, 2)
# all should give back the same type of arguments, e.g. ints for shape
assert m.shape == (3, 2) and all(type(i) is int for i in m.shape)
assert m.rows == 3 and type(m.rows) is int
assert m.cols == 2 and type(m.cols) is int
if not c % 2:
assert type(m._mat) is list
else:
assert type(m._smat) is dict
def test_division():
v = Matrix(1, 2, [x, y])
assert v.__div__(z) == Matrix(1, 2, [x/z, y/z])
assert v.__truediv__(z) == Matrix(1, 2, [x/z, y/z])
assert v/z == Matrix(1, 2, [x/z, y/z])
def test_sum():
m = Matrix([[1, 2, 3], [x, y, x], [2*y, -50, z*x]])
assert m + m == Matrix([[2, 4, 6], [2*x, 2*y, 2*x], [4*y, -100, 2*z*x]])
n = Matrix(1, 2, [1, 2])
raises(ShapeError, lambda: m + n)
def test_addition():
a = Matrix((
(1, 2),
(3, 1),
))
b = Matrix((
(1, 2),
(3, 0),
))
assert a + b == a.add(b) == Matrix([[2, 4], [6, 1]])
def test_fancy_index_matrix():
for M in (Matrix, SparseMatrix):
a = M(3, 3, range(9))
assert a == a[:, :]
assert a[1, :] == Matrix(1, 3, [3, 4, 5])
assert a[:, 1] == Matrix([1, 4, 7])
assert a[[0, 1], :] == Matrix([[0, 1, 2], [3, 4, 5]])
assert a[[0, 1], 2] == a[[0, 1], [2]]
assert a[2, [0, 1]] == a[[2], [0, 1]]
assert a[:, [0, 1]] == Matrix([[0, 1], [3, 4], [6, 7]])
assert a[0, 0] == 0
assert a[0:2, :] == Matrix([[0, 1, 2], [3, 4, 5]])
assert a[:, 0:2] == Matrix([[0, 1], [3, 4], [6, 7]])
assert a[::2, 1] == a[[0, 2], 1]
assert a[1, ::2] == a[1, [0, 2]]
a = M(3, 3, range(9))
assert a[[0, 2, 1, 2, 1], :] == Matrix([
[0, 1, 2],
[6, 7, 8],
[3, 4, 5],
[6, 7, 8],
[3, 4, 5]])
assert a[:, [0,2,1,2,1]] == Matrix([
[0, 2, 1, 2, 1],
[3, 5, 4, 5, 4],
[6, 8, 7, 8, 7]])
a = SparseMatrix.zeros(3)
a[1, 2] = 2
a[0, 1] = 3
a[2, 0] = 4
assert a.extract([1, 1], [2]) == Matrix([
[2],
[2]])
assert a.extract([1, 0], [2, 2, 2]) == Matrix([
[2, 2, 2],
[0, 0, 0]])
assert a.extract([1, 0, 1, 2], [2, 0, 1, 0]) == Matrix([
[2, 0, 0, 0],
[0, 0, 3, 0],
[2, 0, 0, 0],
[0, 4, 0, 4]])
def test_multiplication():
a = Matrix((
(1, 2),
(3, 1),
(0, 6),
))
b = Matrix((
(1, 2),
(3, 0),
))
c = a*b
assert c[0, 0] == 7
assert c[0, 1] == 2
assert c[1, 0] == 6
assert c[1, 1] == 6
assert c[2, 0] == 18
assert c[2, 1] == 0
h = matrix_multiply_elementwise(a, c)
assert h == a.multiply_elementwise(c)
assert h[0, 0] == 7
assert h[0, 1] == 4
assert h[1, 0] == 18
assert h[1, 1] == 6
assert h[2, 0] == 0
assert h[2, 1] == 0
raises(ShapeError, lambda: matrix_multiply_elementwise(a, b))
c = b * Symbol("x")
assert isinstance(c, Matrix)
assert c[0, 0] == x
assert c[0, 1] == 2*x
assert c[1, 0] == 3*x
assert c[1, 1] == 0
c2 = x * b
assert c == c2
c = 5 * b
assert isinstance(c, Matrix)
assert c[0, 0] == 5
assert c[0, 1] == 2*5
assert c[1, 0] == 3*5
assert c[1, 1] == 0
def test_power():
raises(NonSquareMatrixError, lambda: Matrix((1, 2))**2)
R = Rational
A = Matrix([[2, 3], [4, 5]])
assert (A**-3)[:] == [R(-269)/8, R(153)/8, R(51)/2, R(-29)/2]
assert (A**5)[:] == [6140, 8097, 10796, 14237]
A = Matrix([[2, 1, 3], [4, 2, 4], [6, 12, 1]])
assert (A**3)[:] == [290, 262, 251, 448, 440, 368, 702, 954, 433]
assert A**0 == eye(3)
assert A**1 == A
assert (Matrix([[2]]) ** 100)[0, 0] == 2**100
assert eye(2)**10000000 == eye(2)
assert Matrix([[1, 2], [3, 4]])**Integer(2) == Matrix([[7, 10], [15, 22]])
A = Matrix([[33, 24], [48, 57]])
assert (A**(S(1)/2))[:] == [5, 2, 4, 7]
A = Matrix([[0, 4], [-1, 5]])
assert (A**(S(1)/2))**2 == A
def test_creation():
raises(ValueError, lambda: Matrix(5, 5, range(20)))
raises(IndexError, lambda: Matrix((1, 2))[2])
with raises(IndexError):
Matrix((1, 2))[1:2] = 5
with raises(IndexError):
Matrix((1, 2))[3] = 5
assert Matrix() == Matrix([]) == Matrix([[]]) == Matrix(0, 0, [])
a = Matrix([[x, 0], [0, 0]])
m = a
assert m.cols == m.rows
assert m.cols == 2
assert m[:] == [x, 0, 0, 0]
b = Matrix(2, 2, [x, 0, 0, 0])
m = b
assert m.cols == m.rows
assert m.cols == 2
assert m[:] == [x, 0, 0, 0]
assert a == b
assert Matrix(b) == b
c = Matrix((
Matrix((
(1, 2, 3),
(4, 5, 6)
)),
(7, 8, 9)
))
assert c.cols == 3
assert c.rows == 3
assert c[:] == [1, 2, 3, 4, 5, 6, 7, 8, 9]
assert Matrix(eye(2)) == eye(2)
assert ImmutableMatrix(ImmutableMatrix(eye(2))) == ImmutableMatrix(eye(2))
assert ImmutableMatrix(c) == c.as_immutable()
assert Matrix(ImmutableMatrix(c)) == ImmutableMatrix(c).as_mutable()
assert c is not Matrix(c)
def test_tolist():
lst = [[S.One, S.Half, x*y, S.Zero], [x, y, z, x**2], [y, -S.One, z*x, 3]]
m = Matrix(lst)
assert m.tolist() == lst
def test_as_mutable():
assert zeros(0, 3).as_mutable() == zeros(0, 3)
assert zeros(0, 3).as_immutable() == ImmutableMatrix(zeros(0, 3))
def test_determinant():
for M in [Matrix(), Matrix([[1]])]:
assert (
M.det() ==
M.det_bareis() ==
M.berkowitz_det() ==
M.det_LU_decomposition() ==
1)
M = Matrix(( (-3, 2),
( 8, -5) ))
assert M.det(method="bareis") == -1
assert M.det(method="berkowitz") == -1
M = Matrix(( (x, 1),
(y, 2*y) ))
assert M.det(method="bareis") == 2*x*y - y
assert M.det(method="berkowitz") == 2*x*y - y
M = Matrix(( (1, 1, 1),
(1, 2, 3),
(1, 3, 6) ))
assert M.det(method="bareis") == 1
assert M.det(method="berkowitz") == 1
M = Matrix(( ( 3, -2, 0, 5),
(-2, 1, -2, 2),
( 0, -2, 5, 0),
( 5, 0, 3, 4) ))
assert M.det(method="bareis") == -289
assert M.det(method="berkowitz") == -289
M = Matrix(( ( 1, 2, 3, 4),
( 5, 6, 7, 8),
( 9, 10, 11, 12),
(13, 14, 15, 16) ))
assert M.det(method="bareis") == 0
assert M.det(method="berkowitz") == 0
M = Matrix(( (3, 2, 0, 0, 0),
(0, 3, 2, 0, 0),
(0, 0, 3, 2, 0),
(0, 0, 0, 3, 2),
(2, 0, 0, 0, 3) ))
assert M.det(method="bareis") == 275
assert M.det(method="berkowitz") == 275
M = Matrix(( (1, 0, 1, 2, 12),
(2, 0, 1, 1, 4),
(2, 1, 1, -1, 3),
(3, 2, -1, 1, 8),
(1, 1, 1, 0, 6) ))
assert M.det(method="bareis") == -55
assert M.det(method="berkowitz") == -55
M = Matrix(( (-5, 2, 3, 4, 5),
( 1, -4, 3, 4, 5),
( 1, 2, -3, 4, 5),
( 1, 2, 3, -2, 5),
( 1, 2, 3, 4, -1) ))
assert M.det(method="bareis") == 11664
assert M.det(method="berkowitz") == 11664
M = Matrix(( ( 2, 7, -1, 3, 2),
( 0, 0, 1, 0, 1),
(-2, 0, 7, 0, 2),
(-3, -2, 4, 5, 3),
( 1, 0, 0, 0, 1) ))
assert M.det(method="bareis") == 123
assert M.det(method="berkowitz") == 123
M = Matrix(( (x, y, z),
(1, 0, 0),
(y, z, x) ))
assert M.det(method="bareis") == z**2 - x*y
assert M.det(method="berkowitz") == z**2 - x*y
def test_det_LU_decomposition():
for M in [Matrix(), Matrix([[1]])]:
assert M.det(method="det_LU") == 1
M = Matrix(( (-3, 2),
( 8, -5) ))
assert M.det(method="det_LU") == -1
M = Matrix(( (x, 1),
(y, 2*y) ))
assert M.det(method="det_LU") == 2*x*y - y
M = Matrix(( (1, 1, 1),
(1, 2, 3),
(1, 3, 6) ))
assert M.det(method="det_LU") == 1
M = Matrix(( ( 3, -2, 0, 5),
(-2, 1, -2, 2),
( 0, -2, 5, 0),
( 5, 0, 3, 4) ))
assert M.det(method="det_LU") == -289
M = Matrix(( (3, 2, 0, 0, 0),
(0, 3, 2, 0, 0),
(0, 0, 3, 2, 0),
(0, 0, 0, 3, 2),
(2, 0, 0, 0, 3) ))
assert M.det(method="det_LU") == 275
M = Matrix(( (1, 0, 1, 2, 12),
(2, 0, 1, 1, 4),
(2, 1, 1, -1, 3),
(3, 2, -1, 1, 8),
(1, 1, 1, 0, 6) ))
assert M.det(method="det_LU") == -55
M = Matrix(( (-5, 2, 3, 4, 5),
( 1, -4, 3, 4, 5),
( 1, 2, -3, 4, 5),
( 1, 2, 3, -2, 5),
( 1, 2, 3, 4, -1) ))
assert M.det(method="det_LU") == 11664
M = Matrix(( ( 2, 7, -1, 3, 2),
( 0, 0, 1, 0, 1),
(-2, 0, 7, 0, 2),
(-3, -2, 4, 5, 3),
( 1, 0, 0, 0, 1) ))
assert M.det(method="det_LU") == 123
M = Matrix(( (x, y, z),
(1, 0, 0),
(y, z, x) ))
assert M.det(method="det_LU") == z**2 - x*y
def test_berkowitz_minors():
B = Matrix(2, 2, [1, 2, 2, 1])
assert B.berkowitz_minors() == (1, -3)
def test_slicing():
m0 = eye(4)
assert m0[:3, :3] == eye(3)
assert m0[2:4, 0:2] == zeros(2)
m1 = Matrix(3, 3, lambda i, j: i + j)
assert m1[0, :] == Matrix(1, 3, (0, 1, 2))
assert m1[1:3, 1] == Matrix(2, 1, (2, 3))
m2 = Matrix([[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11], [12, 13, 14, 15]])
assert m2[:, -1] == Matrix(4, 1, [3, 7, 11, 15])
assert m2[-2:, :] == Matrix([[8, 9, 10, 11], [12, 13, 14, 15]])
def test_submatrix_assignment():
m = zeros(4)
m[2:4, 2:4] = eye(2)
assert m == Matrix(((0, 0, 0, 0),
(0, 0, 0, 0),
(0, 0, 1, 0),
(0, 0, 0, 1)))
m[:2, :2] = eye(2)
assert m == eye(4)
m[:, 0] = Matrix(4, 1, (1, 2, 3, 4))
assert m == Matrix(((1, 0, 0, 0),
(2, 1, 0, 0),
(3, 0, 1, 0),
(4, 0, 0, 1)))
m[:, :] = zeros(4)
assert m == zeros(4)
m[:, :] = [(1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12), (13, 14, 15, 16)]
assert m == Matrix(((1, 2, 3, 4),
(5, 6, 7, 8),
(9, 10, 11, 12),
(13, 14, 15, 16)))
m[:2, 0] = [0, 0]
assert m == Matrix(((0, 2, 3, 4),
(0, 6, 7, 8),
(9, 10, 11, 12),
(13, 14, 15, 16)))
def test_extract():
m = Matrix(4, 3, lambda i, j: i*3 + j)
assert m.extract([0, 1, 3], [0, 1]) == Matrix(3, 2, [0, 1, 3, 4, 9, 10])
assert m.extract([0, 3], [0, 0, 2]) == Matrix(2, 3, [0, 0, 2, 9, 9, 11])
assert m.extract(range(4), range(3)) == m
raises(IndexError, lambda: m.extract([4], [0]))
raises(IndexError, lambda: m.extract([0], [3]))
def test_reshape():
m0 = eye(3)
assert m0.reshape(1, 9) == Matrix(1, 9, (1, 0, 0, 0, 1, 0, 0, 0, 1))
m1 = Matrix(3, 4, lambda i, j: i + j)
assert m1.reshape(
4, 3) == Matrix(((0, 1, 2), (3, 1, 2), (3, 4, 2), (3, 4, 5)))
assert m1.reshape(2, 6) == Matrix(((0, 1, 2, 3, 1, 2), (3, 4, 2, 3, 4, 5)))
def test_applyfunc():
m0 = eye(3)
assert m0.applyfunc(lambda x: 2*x) == eye(3)*2
assert m0.applyfunc(lambda x: 0) == zeros(3)
def test_expand():
m0 = Matrix([[x*(x + y), 2], [((x + y)*y)*x, x*(y + x*(x + y))]])
# Test if expand() returns a matrix
m1 = m0.expand()
assert m1 == Matrix(
[[x*y + x**2, 2], [x*y**2 + y*x**2, x*y + y*x**2 + x**3]])
a = Symbol('a', real=True)
assert Matrix([exp(I*a)]).expand(complex=True) == \
Matrix([cos(a) + I*sin(a)])
assert Matrix([[0, 1, 2], [0, 0, -1], [0, 0, 0]]).exp() == Matrix([
[1, 1, Rational(3, 2)],
[0, 1, -1],
[0, 0, 1]]
)
def test_random():
M = randMatrix(3, 3)
M = randMatrix(3, 3, seed=3)
M = randMatrix(3, 4, 0, 150)
M = randMatrix(3, symmetric=True)
S = M.copy()
S.simplify()
assert S == M # doesn't fail when elements are Numbers, not int
def test_LUdecomp():
testmat = Matrix([[0, 2, 5, 3],
[3, 3, 7, 4],
[8, 4, 0, 2],
[-2, 6, 3, 4]])
L, U, p = testmat.LUdecomposition()
assert L.is_lower
assert U.is_upper
assert (L*U).permuteBkwd(p) - testmat == zeros(4)
testmat = Matrix([[6, -2, 7, 4],
[0, 3, 6, 7],
[1, -2, 7, 4],
[-9, 2, 6, 3]])
L, U, p = testmat.LUdecomposition()
assert L.is_lower
assert U.is_upper
assert (L*U).permuteBkwd(p) - testmat == zeros(4)
M = Matrix(((1, x, 1), (2, y, 0), (y, 0, z)))
L, U, p = M.LUdecomposition()
assert L.is_lower
assert U.is_upper
assert (L*U).permuteBkwd(p) - M == zeros(3)
mL = Matrix((
(1, 0, 0),
(2, 3, 0),
))
assert mL.is_lower is True
assert mL.is_upper is False
mU = Matrix((
(1, 2, 3),
(0, 4, 5),
))
assert mU.is_lower is False
assert mU.is_upper is True
# test FF LUdecomp
M = Matrix([[1, 3, 3],
[3, 2, 6],
[3, 2, 2]])
P, L, Dee, U = M.LUdecompositionFF()
assert P*M == L*Dee.inv()*U
M = Matrix([[1, 2, 3, 4],
[3, -1, 2, 3],
[3, 1, 3, -2],
[6, -1, 0, 2]])
P, L, Dee, U = M.LUdecompositionFF()
assert P*M == L*Dee.inv()*U
M = Matrix([[0, 0, 1],
[2, 3, 0],
[3, 1, 4]])
P, L, Dee, U = M.LUdecompositionFF()
assert P*M == L*Dee.inv()*U
def test_LUsolve():
A = Matrix([[2, 3, 5],
[3, 6, 2],
[8, 3, 6]])
x = Matrix(3, 1, [3, 7, 5])
b = A*x
soln = A.LUsolve(b)
assert soln == x
A = Matrix([[0, -1, 2],
[5, 10, 7],
[8, 3, 4]])
x = Matrix(3, 1, [-1, 2, 5])
b = A*x
soln = A.LUsolve(b)
assert soln == x
def test_QRsolve():
A = Matrix([[2, 3, 5],
[3, 6, 2],
[8, 3, 6]])
x = Matrix(3, 1, [3, 7, 5])
b = A*x
soln = A.QRsolve(b)
assert soln == x
x = Matrix([[1, 2], [3, 4], [5, 6]])
b = A*x
soln = A.QRsolve(b)
assert soln == x
A = Matrix([[0, -1, 2],
[5, 10, 7],
[8, 3, 4]])
x = Matrix(3, 1, [-1, 2, 5])
b = A*x
soln = A.QRsolve(b)
assert soln == x
x = Matrix([[7, 8], [9, 10], [11, 12]])
b = A*x
soln = A.QRsolve(b)
assert soln == x
def test_inverse():
A = eye(4)
assert A.inv() == eye(4)
assert A.inv(method="LU") == eye(4)
assert A.inv(method="ADJ") == eye(4)
A = Matrix([[2, 3, 5],
[3, 6, 2],
[8, 3, 6]])
Ainv = A.inv()
assert A*Ainv == eye(3)
assert A.inv(method="LU") == Ainv
assert A.inv(method="ADJ") == Ainv
# test that immutability is not a problem
cls = ImmutableMatrix
m = cls([[48, 49, 31],
[ 9, 71, 94],
[59, 28, 65]])
assert all(type(m.inv(s)) is cls for s in 'GE ADJ LU'.split())
cls = ImmutableSparseMatrix
m = cls([[48, 49, 31],
[ 9, 71, 94],
[59, 28, 65]])
assert all(type(m.inv(s)) is cls for s in 'CH LDL'.split())
def test_matrix_inverse_mod():
A = Matrix(2, 1, [1, 0])
raises(NonSquareMatrixError, lambda: A.inv_mod(2))
A = Matrix(2, 2, [1, 0, 0, 0])
raises(ValueError, lambda: A.inv_mod(2))
A = Matrix(2, 2, [1, 2, 3, 4])
Ai = Matrix(2, 2, [1, 1, 0, 1])
assert A.inv_mod(3) == Ai
A = Matrix(2, 2, [1, 0, 0, 1])
assert A.inv_mod(2) == A
def test_util():
R = Rational
v1 = Matrix(1, 3, [1, 2, 3])
v2 = Matrix(1, 3, [3, 4, 5])
assert v1.norm() == sqrt(14)
assert v1.project(v2) == Matrix(1, 3, [R(39)/25, R(52)/25, R(13)/5])
assert Matrix.zeros(1, 2) == Matrix(1, 2, [0, 0])
assert ones(1, 2) == Matrix(1, 2, [1, 1])
assert v1.copy() == v1
# cofactor
assert eye(3) == eye(3).cofactorMatrix()
test = Matrix([[1, 3, 2], [2, 6, 3], [2, 3, 6]])
assert test.cofactorMatrix() == \
Matrix([[27, -6, -6], [-12, 2, 3], [-3, 1, 0]])
test = Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
assert test.cofactorMatrix() == \
Matrix([[-3, 6, -3], [6, -12, 6], [-3, 6, -3]])
def test_jacobian_hessian():
L = Matrix(1, 2, [x**2*y, 2*y**2 + x*y])
syms = [x, y]
assert L.jacobian(syms) == Matrix([[2*x*y, x**2], [y, 4*y + x]])
L = Matrix(1, 2, [x, x**2*y**3])
assert L.jacobian(syms) == Matrix([[1, 0], [2*x*y**3, x**2*3*y**2]])
f = x**2*y
syms = [x, y]
assert hessian(f, syms) == Matrix([[2*y, 2*x], [2*x, 0]])
f = x**2*y**3
assert hessian(f, syms) == \
Matrix([[2*y**3, 6*x*y**2], [6*x*y**2, 6*x**2*y]])
f = z + x*y**2
g = x**2 + 2*y**3
ans = Matrix([[0, 2*y],
[2*y, 2*x]])
assert ans == hessian(f, Matrix([x, y]))
assert ans == hessian(f, Matrix([x, y]).T)
assert hessian(f, (y, x), [g]) == Matrix([
[ 0, 6*y**2, 2*x],
[6*y**2, 2*x, 2*y],
[ 2*x, 2*y, 0]])
def test_QR():
A = Matrix([[1, 2], [2, 3]])
Q, S = A.QRdecomposition()
R = Rational
assert Q == Matrix([
[ 5**R(-1, 2), (R(2)/5)*(R(1)/5)**R(-1, 2)],
[2*5**R(-1, 2), (-R(1)/5)*(R(1)/5)**R(-1, 2)]])
assert S == Matrix([[5**R(1, 2), 8*5**R(-1, 2)], [0, (R(1)/5)**R(1, 2)]])
assert Q*S == A
assert Q.T * Q == eye(2)
A = Matrix([[1, 1, 1], [1, 1, 3], [2, 3, 4]])
Q, R = A.QRdecomposition()
assert Q.T * Q == eye(Q.cols)
assert R.is_upper
assert A == Q*R
def test_QR_non_square():
A = Matrix([[9, 0, 26], [12, 0, -7], [0, 4, 4], [0, -3, -3]])
Q, R = A.QRdecomposition()
assert Q.T * Q == eye(Q.cols)
assert R.is_upper
assert A == Q*R
A = Matrix([[1, -1, 4], [1, 4, -2], [1, 4, 2], [1, -1, 0]])
Q, R = A.QRdecomposition()
assert Q.T * Q == eye(Q.cols)
assert R.is_upper
assert A == Q*R
def test_nullspace():
# first test reduced row-ech form
R = Rational
M = Matrix([[5, 7, 2, 1],
[1, 6, 2, -1]])
out, tmp = M.rref()
assert out == Matrix([[1, 0, -R(2)/23, R(13)/23],
[0, 1, R(8)/23, R(-6)/23]])
M = Matrix([[-5, -1, 4, -3, -1],
[ 1, -1, -1, 1, 0],
[-1, 0, 0, 0, 0],
[ 4, 1, -4, 3, 1],
[-2, 0, 2, -2, -1]])
assert M*M.nullspace()[0] == Matrix(5, 1, [0]*5)
M = Matrix([[ 1, 3, 0, 2, 6, 3, 1],
[-2, -6, 0, -2, -8, 3, 1],
[ 3, 9, 0, 0, 6, 6, 2],
[-1, -3, 0, 1, 0, 9, 3]])
out, tmp = M.rref()
assert out == Matrix([[1, 3, 0, 0, 2, 0, 0],
[0, 0, 0, 1, 2, 0, 0],
[0, 0, 0, 0, 0, 1, R(1)/3],
[0, 0, 0, 0, 0, 0, 0]])
# now check the vectors
basis = M.nullspace()
assert basis[0] == Matrix([-3, 1, 0, 0, 0, 0, 0])
assert basis[1] == Matrix([0, 0, 1, 0, 0, 0, 0])
assert basis[2] == Matrix([-2, 0, 0, -2, 1, 0, 0])
assert basis[3] == Matrix([0, 0, 0, 0, 0, R(-1)/3, 1])
# issue 4797; just see that we can do it when rows > cols
M = Matrix([[1, 2], [2, 4], [3, 6]])
assert M.nullspace()
def test_wronskian():
assert wronskian([cos(x), sin(x)], x) == cos(x)**2 + sin(x)**2
assert wronskian([exp(x), exp(2*x)], x) == exp(3*x)
assert wronskian([exp(x), x], x) == exp(x) - x*exp(x)
assert wronskian([1, x, x**2], x) == 2
w1 = -6*exp(x)*sin(x)*x + 6*cos(x)*exp(x)*x**2 - 6*exp(x)*cos(x)*x - \
exp(x)*cos(x)*x**3 + exp(x)*sin(x)*x**3
assert wronskian([exp(x), cos(x), x**3], x).expand() == w1
assert wronskian([exp(x), cos(x), x**3], x, method='berkowitz').expand() \
== w1
w2 = -x**3*cos(x)**2 - x**3*sin(x)**2 - 6*x*cos(x)**2 - 6*x*sin(x)**2
assert wronskian([sin(x), cos(x), x**3], x).expand() == w2
assert wronskian([sin(x), cos(x), x**3], x, method='berkowitz').expand() \
== w2
assert wronskian([], x) == 1
def test_eigen():
R = Rational
assert eye(3).charpoly(x) == Poly((x - 1)**3, x)
assert eye(3).charpoly(y) == Poly((y - 1)**3, y)
M = Matrix([[1, 0, 0],
[0, 1, 0],
[0, 0, 1]])
assert M.eigenvals(multiple=False) == {S.One: 3}
assert M.eigenvects() == (
[(1, 3, [Matrix([1, 0, 0]),
Matrix([0, 1, 0]),
Matrix([0, 0, 1])])])
M = Matrix([[0, 1, 1],
[1, 0, 0],
[1, 1, 1]])
assert M.eigenvals() == {2*S.One: 1, -S.One: 1, S.Zero: 1}
assert M.eigenvects() == (
[
(-1, 1, [Matrix([-1, 1, 0])]),
( 0, 1, [Matrix([0, -1, 1])]),
( 2, 1, [Matrix([R(2, 3), R(1, 3), 1])])
])
a = Symbol('a')
M = Matrix([[a, 0],
[0, 1]])
assert M.eigenvals() == {a: 1, S.One: 1}
M = Matrix([[1, -1],
[1, 3]])
assert M.eigenvects() == ([(2, 2, [Matrix(2, 1, [-1, 1])])])
M = Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
a = R(15, 2)
b = 3*33**R(1, 2)
c = R(13, 2)
d = (R(33, 8) + 3*b/8)
e = (R(33, 8) - 3*b/8)
def NS(e, n):
return str(N(e, n))
r = [
(a - b/2, 1, [Matrix([(12 + 24/(c - b/2))/((c - b/2)*e) + 3/(c - b/2),
(6 + 12/(c - b/2))/e, 1])]),
( 0, 1, [Matrix([1, -2, 1])]),
(a + b/2, 1, [Matrix([(12 + 24/(c + b/2))/((c + b/2)*d) + 3/(c + b/2),
(6 + 12/(c + b/2))/d, 1])]),
]
r1 = [(NS(r[i][0], 2), NS(r[i][1], 2),
[NS(j, 2) for j in r[i][2][0]]) for i in range(len(r))]
r = M.eigenvects()
r2 = [(NS(r[i][0], 2), NS(r[i][1], 2),
[NS(j, 2) for j in r[i][2][0]]) for i in range(len(r))]
assert sorted(r1) == sorted(r2)
eps = Symbol('eps', real=True)
M = Matrix([[abs(eps), I*eps ],
[-I*eps, abs(eps) ]])
assert M.eigenvects() == (
[
( 0, 1, [Matrix([[-I*eps/abs(eps)], [1]])]),
( 2*abs(eps), 1, [ Matrix([[I*eps/abs(eps)], [1]]) ] ),
])
M = Matrix(3, 3, [1, 2, 0, 0, 3, 0, 2, -4, 2])
M._eigenvects = M.eigenvects(simplify=False)
assert max(i.q for i in M._eigenvects[0][2][0]) > 1
M._eigenvects = M.eigenvects(simplify=True)
assert max(i.q for i in M._eigenvects[0][2][0]) == 1
M = Matrix([[S(1)/4, 1], [1, 1]])
assert M.eigenvects(simplify=True) == [
(S(5)/8 + sqrt(73)/8, 1, [Matrix([[8/(3 + sqrt(73))], [1]])]),
(-sqrt(73)/8 + S(5)/8, 1, [Matrix([[8/(-sqrt(73) + 3)], [1]])])]
assert M.eigenvects(simplify=False) == [
(Rational(5, 8) + sqrt(73)/8, 1,
[Matrix([[-1/(-sqrt(73)/8 + Rational(-3, 8))], [1]])]),
(-sqrt(73)/8 + Rational(5, 8), 1,
[Matrix([[-1/(Rational(-3, 8) + sqrt(73)/8)], [1]])]),
]
m = Matrix([[1, .6, .6], [.6, .9, .9], [.9, .6, .6]])
evals = {-sqrt(385)/20 + S(5)/4: 1, sqrt(385)/20 + S(5)/4: 1, S.Zero: 1}
assert m.eigenvals() == evals
nevals = list(sorted(m.eigenvals(rational=False).keys()))
sevals = list(sorted(evals.keys()))
assert all(abs(nevals[i] - sevals[i]) < 1e-9 for i in range(len(nevals)))
def test_subs():
assert Matrix([[1, x], [x, 4]]).subs(x, 5) == Matrix([[1, 5], [5, 4]])
assert Matrix([[x, 2], [x + y, 4]]).subs([[x, -1], [y, -2]]) == \
Matrix([[-1, 2], [-3, 4]])
assert Matrix([[x, 2], [x + y, 4]]).subs([(x, -1), (y, -2)]) == \
Matrix([[-1, 2], [-3, 4]])
assert Matrix([[x, 2], [x + y, 4]]).subs({x: -1, y: -2}) == \
Matrix([[-1, 2], [-3, 4]])
assert Matrix([x*y]).subs({x: y - 1, y: x - 1}, simultaneous=True) == \
Matrix([(x - 1)*(y - 1)])
for cls in classes:
assert Matrix([[2, 0], [0, 2]]) == cls.eye(2).subs(1, 2)
def test_simplify():
f, n = symbols('f, n')
m = Matrix([[1, x], [x + 1/x, x - 1]])
m = m.row_join(eye(m.cols))
raw = m.rref(simplify=lambda x: x)[0]
assert raw != m.rref(simplify=True)[0]
M = Matrix([[ 1/x + 1/y, (x + x*y) / x ],
[ (f(x) + y*f(x))/f(x), 2 * (1/n - cos(n * pi)/n) / pi ]])
M.simplify()
assert M == Matrix([[ (x + y)/(x * y), 1 + y ],
[ 1 + y, 2*((1 - 1*cos(pi*n))/(pi*n)) ]])
eq = (1 + x)**2
M = Matrix([[eq]])
M.simplify()
assert M == Matrix([[eq]])
    M.simplify(ratio=oo)  # in-place; returns None, so comparing it would be a no-op
assert M == Matrix([[eq.simplify(ratio=oo)]])
def test_transpose():
M = Matrix([[1, 2, 3, 4, 5, 6, 7, 8, 9, 0],
[1, 2, 3, 4, 5, 6, 7, 8, 9, 0]])
assert M.T == Matrix( [ [1, 1],
[2, 2],
[3, 3],
[4, 4],
[5, 5],
[6, 6],
[7, 7],
[8, 8],
[9, 9],
[0, 0] ])
assert M.T.T == M
assert M.T == M.transpose()
def test_conjugate():
M = Matrix([[0, I, 5],
[1, 2, 0]])
assert M.T == Matrix([[0, 1],
[I, 2],
[5, 0]])
assert M.C == Matrix([[0, -I, 5],
[1, 2, 0]])
assert M.C == M.conjugate()
assert M.H == M.T.C
assert M.H == Matrix([[ 0, 1],
[-I, 2],
[ 5, 0]])
def test_conj_dirac():
raises(AttributeError, lambda: eye(3).D)
M = Matrix([[1, I, I, I],
[0, 1, I, I],
[0, 0, 1, I],
[0, 0, 0, 1]])
assert M.D == Matrix([[ 1, 0, 0, 0],
[-I, 1, 0, 0],
[-I, -I, -1, 0],
[-I, -I, I, -1]])
def test_trace():
M = Matrix([[1, 0, 0],
[0, 5, 0],
[0, 0, 8]])
assert M.trace() == 14
def test_shape():
M = Matrix([[x, 0, 0],
[0, y, 0]])
assert M.shape == (2, 3)
def test_col_row_op():
M = Matrix([[x, 0, 0],
[0, y, 0]])
M.row_op(1, lambda r, j: r + j + 1)
assert M == Matrix([[x, 0, 0],
[1, y + 2, 3]])
M.col_op(0, lambda c, j: c + y**j)
assert M == Matrix([[x + 1, 0, 0],
[1 + y, y + 2, 3]])
# neither row nor slice give copies that allow the original matrix to
# be changed
assert M.row(0) == Matrix([[x + 1, 0, 0]])
r1 = M.row(0)
r1[0] = 42
assert M[0, 0] == x + 1
r1 = M[0, :-1] # also testing negative slice
r1[0] = 42
assert M[0, 0] == x + 1
c1 = M.col(0)
assert c1 == Matrix([x + 1, 1 + y])
c1[0] = 0
assert M[0, 0] == x + 1
c1 = M[:, 0]
c1[0] = 42
assert M[0, 0] == x + 1
def test_zip_row_op():
for cls in classes[:2]: # XXX: immutable matrices don't support row ops
M = cls.eye(3)
M.zip_row_op(1, 0, lambda v, u: v + 2*u)
assert M == cls([[1, 0, 0],
[2, 1, 0],
[0, 0, 1]])
M = cls.eye(3)*2
M[0, 1] = -1
        M.zip_row_op(1, 0, lambda v, u: v + 2*u)
assert M == cls([[2, -1, 0],
[4, 0, 0],
[0, 0, 2]])
def test_issue_3950():
m = Matrix([1, 2, 3])
a = Matrix([1, 2, 3])
b = Matrix([2, 2, 3])
assert not (m in [])
assert not (m in [1])
assert m != 1
assert m == a
assert m != b
def test_issue_3981():
class Index1(object):
def __index__(self):
return 1
class Index2(object):
def __index__(self):
return 2
index1 = Index1()
index2 = Index2()
m = Matrix([1, 2, 3])
assert m[index2] == 3
m[index2] = 5
assert m[2] == 5
m = Matrix([[1, 2, 3], [4, 5, 6]])
assert m[index1, index2] == 6
assert m[1, index2] == 6
assert m[index1, 2] == 6
m[index1, index2] = 4
assert m[1, 2] == 4
m[1, index2] = 6
assert m[1, 2] == 6
m[index1, 2] = 8
assert m[1, 2] == 8
def test_evalf():
a = Matrix([sqrt(5), 6])
assert all(a.evalf()[i] == a[i].evalf() for i in range(2))
assert all(a.evalf(2)[i] == a[i].evalf(2) for i in range(2))
assert all(a.n(2)[i] == a[i].n(2) for i in range(2))
def test_is_symbolic():
a = Matrix([[x, x], [x, x]])
assert a.is_symbolic() is True
a = Matrix([[1, 2, 3, 4], [5, 6, 7, 8]])
assert a.is_symbolic() is False
a = Matrix([[1, 2, 3, 4], [5, 6, x, 8]])
assert a.is_symbolic() is True
a = Matrix([[1, x, 3]])
assert a.is_symbolic() is True
a = Matrix([[1, 2, 3]])
assert a.is_symbolic() is False
a = Matrix([[1], [x], [3]])
assert a.is_symbolic() is True
a = Matrix([[1], [2], [3]])
assert a.is_symbolic() is False
def test_is_upper():
a = Matrix([[1, 2, 3]])
assert a.is_upper is True
a = Matrix([[1], [2], [3]])
assert a.is_upper is False
def test_is_lower():
a = Matrix([[1, 2, 3]])
assert a.is_lower is False
a = Matrix([[1], [2], [3]])
assert a.is_lower is True
def test_is_nilpotent():
a = Matrix(4, 4, [0, 2, 1, 6, 0, 0, 1, 2, 0, 0, 0, 3, 0, 0, 0, 0])
assert a.is_nilpotent()
a = Matrix([[1, 0], [0, 1]])
assert not a.is_nilpotent()
def test_zeros_ones_fill():
n, m = 3, 5
a = zeros(n, m)
a.fill( 5 )
b = 5 * ones(n, m)
assert a == b
assert a.rows == b.rows == 3
assert a.cols == b.cols == 5
assert a.shape == b.shape == (3, 5)
assert zeros(2) == zeros(2, 2)
assert ones(2) == ones(2, 2)
assert zeros(2, 3) == Matrix(2, 3, [0]*6)
assert ones(2, 3) == Matrix(2, 3, [1]*6)
def test_empty_zeros():
a = zeros(0)
assert a == Matrix()
a = zeros(0, 2)
assert a.rows == 0
assert a.cols == 2
a = zeros(2, 0)
assert a.rows == 2
assert a.cols == 0
def test_issue_3749():
a = Matrix([[x**2, x*y], [x*sin(y), x*cos(y)]])
assert a.diff(x) == Matrix([[2*x, y], [sin(y), cos(y)]])
assert Matrix([
[x, -x, x**2],
[exp(x), 1/x - exp(-x), x + 1/x]]).limit(x, oo) == \
Matrix([[oo, -oo, oo], [oo, 0, oo]])
assert Matrix([
[(exp(x) - 1)/x, 2*x + y*x, x**x ],
[1/x, abs(x), abs(sin(x + 1))]]).limit(x, 0) == \
Matrix([[1, 0, 1], [oo, 0, sin(1)]])
assert a.integrate(x) == Matrix([
[Rational(1, 3)*x**3, y*x**2/2],
[x**2*sin(y)/2, x**2*cos(y)/2]])
def test_inv_iszerofunc():
A = eye(4)
A.col_swap(0, 1)
for method in "GE", "LU":
assert A.inv(method=method, iszerofunc=lambda x: x == 0) == \
A.inv(method="ADJ")
def test_jacobian_metrics():
rho, phi = symbols("rho,phi")
X = Matrix([rho*cos(phi), rho*sin(phi)])
Y = Matrix([rho, phi])
J = X.jacobian(Y)
assert J == X.jacobian(Y.T)
assert J == (X.T).jacobian(Y)
assert J == (X.T).jacobian(Y.T)
g = J.T*eye(J.shape[0])*J
g = g.applyfunc(trigsimp)
assert g == Matrix([[1, 0], [0, rho**2]])
def test_jacobian2():
rho, phi = symbols("rho,phi")
X = Matrix([rho*cos(phi), rho*sin(phi), rho**2])
Y = Matrix([rho, phi])
J = Matrix([
[cos(phi), -rho*sin(phi)],
[sin(phi), rho*cos(phi)],
[ 2*rho, 0],
])
assert X.jacobian(Y) == J
def test_issue_4564():
X = Matrix([exp(x + y + z), exp(x + y + z), exp(x + y + z)])
Y = Matrix([x, y, z])
for i in range(1, 3):
for j in range(1, 3):
X_slice = X[:i, :]
Y_slice = Y[:j, :]
J = X_slice.jacobian(Y_slice)
assert J.rows == i
assert J.cols == j
for k in range(j):
assert J[:, k] == X_slice
def test_nonvectorJacobian():
X = Matrix([[exp(x + y + z), exp(x + y + z)],
[exp(x + y + z), exp(x + y + z)]])
raises(TypeError, lambda: X.jacobian(Matrix([x, y, z])))
X = X[0, :]
Y = Matrix([[x, y], [x, z]])
raises(TypeError, lambda: X.jacobian(Y))
raises(TypeError, lambda: X.jacobian(Matrix([ [x, y], [x, z] ])))
def test_vec():
m = Matrix([[1, 3], [2, 4]])
m_vec = m.vec()
assert m_vec.cols == 1
for i in range(4):
assert m_vec[i] == i + 1
def test_vech():
m = Matrix([[1, 2], [2, 3]])
m_vech = m.vech()
assert m_vech.cols == 1
for i in range(3):
assert m_vech[i] == i + 1
m_vech = m.vech(diagonal=False)
assert m_vech[0] == 2
m = Matrix([[1, x*(x + y)], [y*x + x**2, 1]])
m_vech = m.vech(diagonal=False)
assert m_vech[0] == x*(x + y)
m = Matrix([[1, x*(x + y)], [y*x, 1]])
m_vech = m.vech(diagonal=False, check_symmetry=False)
assert m_vech[0] == y*x
def test_vech_errors():
m = Matrix([[1, 3]])
raises(ShapeError, lambda: m.vech())
m = Matrix([[1, 3], [2, 4]])
raises(ValueError, lambda: m.vech())
raises(ShapeError, lambda: Matrix([ [1, 3] ]).vech())
raises(ValueError, lambda: Matrix([ [1, 3], [2, 4] ]).vech())
def test_diag():
a = Matrix([[1, 2], [2, 3]])
b = Matrix([[3, x], [y, 3]])
c = Matrix([[3, x, 3], [y, 3, z], [x, y, z]])
assert diag(a, b, b) == Matrix([
[1, 2, 0, 0, 0, 0],
[2, 3, 0, 0, 0, 0],
[0, 0, 3, x, 0, 0],
[0, 0, y, 3, 0, 0],
[0, 0, 0, 0, 3, x],
[0, 0, 0, 0, y, 3],
])
assert diag(a, b, c) == Matrix([
[1, 2, 0, 0, 0, 0, 0],
[2, 3, 0, 0, 0, 0, 0],
[0, 0, 3, x, 0, 0, 0],
[0, 0, y, 3, 0, 0, 0],
[0, 0, 0, 0, 3, x, 3],
[0, 0, 0, 0, y, 3, z],
[0, 0, 0, 0, x, y, z],
])
assert diag(a, c, b) == Matrix([
[1, 2, 0, 0, 0, 0, 0],
[2, 3, 0, 0, 0, 0, 0],
[0, 0, 3, x, 3, 0, 0],
[0, 0, y, 3, z, 0, 0],
[0, 0, x, y, z, 0, 0],
[0, 0, 0, 0, 0, 3, x],
[0, 0, 0, 0, 0, y, 3],
])
a = Matrix([x, y, z])
b = Matrix([[1, 2], [3, 4]])
c = Matrix([[5, 6]])
assert diag(a, 7, b, c) == Matrix([
[x, 0, 0, 0, 0, 0],
[y, 0, 0, 0, 0, 0],
[z, 0, 0, 0, 0, 0],
[0, 7, 0, 0, 0, 0],
[0, 0, 1, 2, 0, 0],
[0, 0, 3, 4, 0, 0],
[0, 0, 0, 0, 5, 6],
])
assert diag(1, [2, 3], [[4, 5]]) == Matrix([
[1, 0, 0, 0],
[0, 2, 0, 0],
[0, 3, 0, 0],
[0, 0, 4, 5]])
def test_get_diag_blocks1():
a = Matrix([[1, 2], [2, 3]])
b = Matrix([[3, x], [y, 3]])
c = Matrix([[3, x, 3], [y, 3, z], [x, y, z]])
assert a.get_diag_blocks() == [a]
assert b.get_diag_blocks() == [b]
assert c.get_diag_blocks() == [c]
def test_get_diag_blocks2():
a = Matrix([[1, 2], [2, 3]])
b = Matrix([[3, x], [y, 3]])
c = Matrix([[3, x, 3], [y, 3, z], [x, y, z]])
assert diag(a, b, b).get_diag_blocks() == [a, b, b]
assert diag(a, b, c).get_diag_blocks() == [a, b, c]
assert diag(a, c, b).get_diag_blocks() == [a, c, b]
assert diag(c, c, b).get_diag_blocks() == [c, c, b]
def test_inv_block():
a = Matrix([[1, 2], [2, 3]])
b = Matrix([[3, x], [y, 3]])
c = Matrix([[3, x, 3], [y, 3, z], [x, y, z]])
A = diag(a, b, b)
assert A.inv(try_block_diag=True) == diag(a.inv(), b.inv(), b.inv())
A = diag(a, b, c)
assert A.inv(try_block_diag=True) == diag(a.inv(), b.inv(), c.inv())
A = diag(a, c, b)
assert A.inv(try_block_diag=True) == diag(a.inv(), c.inv(), b.inv())
A = diag(a, a, b, a, c, a)
assert A.inv(try_block_diag=True) == diag(
a.inv(), a.inv(), b.inv(), a.inv(), c.inv(), a.inv())
assert A.inv(try_block_diag=True, method="ADJ") == diag(
a.inv(method="ADJ"), a.inv(method="ADJ"), b.inv(method="ADJ"),
a.inv(method="ADJ"), c.inv(method="ADJ"), a.inv(method="ADJ"))
def test_creation_args():
"""
Check that matrix dimensions can be specified using any reasonable type
(see issue 4614).
"""
raises(ValueError, lambda: zeros(3, -1))
raises(TypeError, lambda: zeros(1, 2, 3, 4))
assert zeros(long(3)) == zeros(3)
assert zeros(Integer(3)) == zeros(3)
assert zeros(3.) == zeros(3)
assert eye(long(3)) == eye(3)
assert eye(Integer(3)) == eye(3)
assert eye(3.) == eye(3)
assert ones(long(3), Integer(4)) == ones(3, 4)
raises(TypeError, lambda: Matrix(5))
raises(TypeError, lambda: Matrix(1, 2))
def test_diagonal_symmetrical():
m = Matrix(2, 2, [0, 1, 1, 0])
assert not m.is_diagonal()
assert m.is_symmetric()
assert m.is_symmetric(simplify=False)
m = Matrix(2, 2, [1, 0, 0, 1])
assert m.is_diagonal()
m = diag(1, 2, 3)
assert m.is_diagonal()
assert m.is_symmetric()
m = Matrix(3, 3, [1, 0, 0, 0, 2, 0, 0, 0, 3])
assert m == diag(1, 2, 3)
m = Matrix(2, 3, zeros(2, 3))
assert not m.is_symmetric()
assert m.is_diagonal()
m = Matrix(((5, 0), (0, 6), (0, 0)))
assert m.is_diagonal()
m = Matrix(((5, 0, 0), (0, 6, 0)))
assert m.is_diagonal()
m = Matrix(3, 3, [1, x**2 + 2*x + 1, y, (x + 1)**2, 2, 0, y, 0, 3])
assert m.is_symmetric()
assert not m.is_symmetric(simplify=False)
assert m.expand().is_symmetric(simplify=False)
def test_diagonalization():
m = Matrix(3, 2, [-3, 1, -3, 20, 3, 10])
assert not m.is_diagonalizable()
assert not m.is_symmetric()
raises(NonSquareMatrixError, lambda: m.diagonalize())
# diagonalizable
m = diag(1, 2, 3)
(P, D) = m.diagonalize()
assert P == eye(3)
assert D == m
m = Matrix(2, 2, [0, 1, 1, 0])
assert m.is_symmetric()
assert m.is_diagonalizable()
(P, D) = m.diagonalize()
assert P.inv() * m * P == D
m = Matrix(2, 2, [1, 0, 0, 3])
assert m.is_symmetric()
assert m.is_diagonalizable()
(P, D) = m.diagonalize()
assert P.inv() * m * P == D
assert P == eye(2)
assert D == m
m = Matrix(2, 2, [1, 1, 0, 0])
assert m.is_diagonalizable()
(P, D) = m.diagonalize()
assert P.inv() * m * P == D
m = Matrix(3, 3, [1, 2, 0, 0, 3, 0, 2, -4, 2])
assert m.is_diagonalizable()
(P, D) = m.diagonalize()
assert P.inv() * m * P == D
for i in P:
assert i.as_numer_denom()[1] == 1
m = Matrix(2, 2, [1, 0, 0, 0])
assert m.is_diagonal()
assert m.is_diagonalizable()
(P, D) = m.diagonalize()
assert P.inv() * m * P == D
assert P == Matrix([[0, 1], [1, 0]])
# diagonalizable, complex only
m = Matrix(2, 2, [0, 1, -1, 0])
assert not m.is_diagonalizable(True)
raises(MatrixError, lambda: m.diagonalize(True))
assert m.is_diagonalizable()
(P, D) = m.diagonalize()
assert P.inv() * m * P == D
# not diagonalizable
m = Matrix(2, 2, [0, 1, 0, 0])
assert not m.is_diagonalizable()
raises(MatrixError, lambda: m.diagonalize())
m = Matrix(3, 3, [-3, 1, -3, 20, 3, 10, 2, -2, 4])
assert not m.is_diagonalizable()
raises(MatrixError, lambda: m.diagonalize())
# symbolic
a, b, c, d = symbols('a b c d')
m = Matrix(2, 2, [a, c, c, b])
assert m.is_symmetric()
assert m.is_diagonalizable()
@XFAIL
def test_eigen_vects():
m = Matrix(2, 2, [1, 0, 0, I])
raises(NotImplementedError, lambda: m.is_diagonalizable(True))
# !!! bug because of eigenvects() or roots(x**2 + (-1 - I)*x + I, x)
# see issue 5292
assert not m.is_diagonalizable(True)
raises(MatrixError, lambda: m.diagonalize(True))
(P, D) = m.diagonalize(True)
def test_jordan_form():
m = Matrix(3, 2, [-3, 1, -3, 20, 3, 10])
raises(NonSquareMatrixError, lambda: m.jordan_form())
# diagonalizable
m = Matrix(3, 3, [7, -12, 6, 10, -19, 10, 12, -24, 13])
Jmust = Matrix(3, 3, [-1, 0, 0, 0, 1, 0, 0, 0, 1])
P, J = m.jordan_form()
assert Jmust == J
assert Jmust == m.diagonalize()[1]
# m = Matrix(3, 3, [0, 6, 3, 1, 3, 1, -2, 2, 1])
# m.jordan_form() # very long
# diagonalizable, complex only
# Jordan cells
# complexity: one of eigenvalues is zero
m = Matrix(3, 3, [0, 1, 0, -4, 4, 0, -2, 1, 2])
# The blocks are ordered according to the value of their eigenvalues,
# in order to make the matrix compatible with .diagonalize()
Jmust = Matrix(3, 3, [2, 1, 0, 0, 2, 0, 0, 0, 2])
P, J = m.jordan_form()
assert Jmust == J
P, Jcells = m.jordan_cells()
# same here see 1456ff
assert Jcells[1] == Matrix(1, 1, [2])
assert Jcells[0] == Matrix(2, 2, [2, 1, 0, 2])
# complexity: all of eigenvalues are equal
m = Matrix(3, 3, [2, 6, -15, 1, 1, -5, 1, 2, -6])
# Jmust = Matrix(3, 3, [-1, 0, 0, 0, -1, 1, 0, 0, -1])
# same here see 1456ff
Jmust = Matrix(3, 3, [-1, 1, 0, 0, -1, 0, 0, 0, -1])
P, J = m.jordan_form()
assert Jmust == J
# complexity: two of eigenvalues are zero
m = Matrix(3, 3, [4, -5, 2, 5, -7, 3, 6, -9, 4])
Jmust = Matrix(3, 3, [0, 1, 0, 0, 0, 0, 0, 0, 1])
P, J = m.jordan_form()
assert Jmust == J
m = Matrix(4, 4, [6, 5, -2, -3, -3, -1, 3, 3, 2, 1, -2, -3, -1, 1, 5, 5])
Jmust = Matrix(4, 4, [2, 1, 0, 0,
0, 2, 0, 0,
0, 0, 2, 1,
0, 0, 0, 2]
)
P, J = m.jordan_form()
assert Jmust == J
m = Matrix(4, 4, [6, 2, -8, -6, -3, 2, 9, 6, 2, -2, -8, -6, -1, 0, 3, 4])
# Jmust = Matrix(4, 4, [2, 0, 0, 0, 0, 2, 1, 0, 0, 0, 2, 0, 0, 0, 0, -2])
# same here see 1456ff
Jmust = Matrix(4, 4, [-2, 0, 0, 0,
0, 2, 1, 0,
0, 0, 2, 0,
0, 0, 0, 2])
P, J = m.jordan_form()
assert Jmust == J
m = Matrix(4, 4, [5, 4, 2, 1, 0, 1, -1, -1, -1, -1, 3, 0, 1, 1, -1, 2])
assert not m.is_diagonalizable()
Jmust = Matrix(4, 4, [1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 4, 1, 0, 0, 0, 4])
P, J = m.jordan_form()
assert Jmust == J
    # The following tests are new and cover (some of) the cases where the old
    # algorithm failed because the block structure can *NOT* be determined
    # from algebraic and geometric multiplicity alone. This is seen most
    # easily by computing the J.c.f. of a matrix that is already in J.c.f.
m = Matrix(4, 4, [2, 1, 0, 0,
0, 2, 1, 0,
0, 0, 2, 0,
0, 0, 0, 2
])
P, J = m.jordan_form()
assert m == J
m = Matrix(4, 4, [2, 1, 0, 0,
0, 2, 0, 0,
0, 0, 2, 1,
0, 0, 0, 2
])
P, J = m.jordan_form()
assert m == J
def test_Matrix_berkowitz_charpoly():
UA, K_i, K_w = symbols('UA K_i K_w')
A = Matrix([[-K_i - UA + K_i**2/(K_i + K_w), K_i*K_w/(K_i + K_w)],
[ K_i*K_w/(K_i + K_w), -K_w + K_w**2/(K_i + K_w)]])
charpoly = A.berkowitz_charpoly(x)
assert charpoly == \
Poly(x**2 + (K_i*UA + K_w*UA + 2*K_i*K_w)/(K_i + K_w)*x +
K_i*K_w*UA/(K_i + K_w), x, domain='ZZ(K_i,K_w,UA)')
assert type(charpoly) is PurePoly
A = Matrix([[1, 3], [2, 0]])
assert A.charpoly() == A.charpoly(x) == PurePoly(x**2 - x - 6)
def test_exp():
m = Matrix([[3, 4], [0, -2]])
m_exp = Matrix([[exp(3), -4*exp(-2)/5 + 4*exp(3)/5], [0, exp(-2)]])
assert m.exp() == m_exp
assert exp(m) == m_exp
m = Matrix([[1, 0], [0, 1]])
assert m.exp() == Matrix([[E, 0], [0, E]])
assert exp(m) == Matrix([[E, 0], [0, E]])
def test_has():
A = Matrix(((x, y), (2, 3)))
assert A.has(x)
assert not A.has(z)
assert A.has(Symbol)
A = A.subs(x, 2)
assert not A.has(x)
def test_errors():
raises(ValueError, lambda: Matrix([[1, 2], [1]]))
raises(IndexError, lambda: Matrix([[1, 2]])[1.2, 5])
raises(IndexError, lambda: Matrix([[1, 2]])[1, 5.2])
raises(ValueError, lambda: randMatrix(3, c=4, symmetric=True))
raises(ValueError, lambda: Matrix([1, 2]).reshape(4, 6))
raises(ShapeError,
lambda: Matrix([[1, 2], [3, 4]]).copyin_matrix([1, 0], Matrix([1, 2])))
raises(TypeError, lambda: Matrix([[1, 2], [3, 4]]).copyin_list([0,
1], set([])))
raises(NonSquareMatrixError, lambda: Matrix([[1, 2, 3], [2, 3, 0]]).inv())
raises(ShapeError,
lambda: Matrix(1, 2, [1, 2]).row_join(Matrix([[1, 2], [3, 4]])))
raises(
ShapeError, lambda: Matrix([1, 2]).col_join(Matrix([[1, 2], [3, 4]])))
raises(ShapeError, lambda: Matrix([1]).row_insert(1, Matrix([[1,
2], [3, 4]])))
raises(ShapeError, lambda: Matrix([1]).col_insert(1, Matrix([[1,
2], [3, 4]])))
raises(NonSquareMatrixError, lambda: Matrix([1, 2]).trace())
raises(TypeError, lambda: Matrix([1]).applyfunc(1))
raises(ShapeError, lambda: Matrix([1]).LUsolve(Matrix([[1, 2], [3, 4]])))
raises(MatrixError, lambda: Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]
]).QRdecomposition())
raises(MatrixError, lambda: Matrix(1, 2, [1, 2]).QRdecomposition())
raises(
NonSquareMatrixError, lambda: Matrix([1, 2]).LUdecomposition_Simple())
raises(ValueError, lambda: Matrix([[1, 2], [3, 4]]).minorEntry(4, 5))
raises(ValueError, lambda: Matrix([[1, 2], [3, 4]]).minorMatrix(4, 5))
raises(TypeError, lambda: Matrix([1, 2, 3]).cross(1))
raises(TypeError, lambda: Matrix([1, 2, 3]).dot(1))
raises(ShapeError, lambda: Matrix([1, 2, 3]).dot(Matrix([1, 2])))
raises(ShapeError, lambda: Matrix([1, 2]).dot([]))
raises(TypeError, lambda: Matrix([1, 2]).dot('a'))
raises(NonSquareMatrixError, lambda: Matrix([1, 2, 3]).exp())
raises(ShapeError, lambda: Matrix([[1, 2], [3, 4]]).normalized())
raises(ValueError, lambda: Matrix([1, 2]).inv(method='not a method'))
raises(NonSquareMatrixError, lambda: Matrix([1, 2]).inverse_GE())
raises(ValueError, lambda: Matrix([[1, 2], [1, 2]]).inverse_GE())
raises(NonSquareMatrixError, lambda: Matrix([1, 2]).inverse_ADJ())
raises(ValueError, lambda: Matrix([[1, 2], [1, 2]]).inverse_ADJ())
raises(NonSquareMatrixError, lambda: Matrix([1, 2]).inverse_LU())
raises(NonSquareMatrixError, lambda: Matrix([1, 2]).is_nilpotent())
raises(NonSquareMatrixError, lambda: Matrix([1, 2]).det())
raises(ValueError,
lambda: Matrix([[1, 2], [3, 4]]).det(method='Not a real method'))
raises(NonSquareMatrixError, lambda: Matrix([1, 2]).det_bareis())
raises(NonSquareMatrixError, lambda: Matrix([1, 2]).berkowitz())
raises(NonSquareMatrixError, lambda: Matrix([1, 2]).berkowitz_det())
raises(ValueError,
lambda: hessian(Matrix([[1, 2], [3, 4]]), Matrix([[1, 2], [2, 1]])))
raises(ValueError, lambda: hessian(Matrix([[1, 2], [3, 4]]), []))
raises(ValueError, lambda: hessian(Symbol('x')**2, 'a'))
raises(ValueError,
lambda: Matrix([[5, 10, 7], [0, -1, 2], [8, 3, 4]]
).LUdecomposition_Simple(iszerofunc=lambda x: abs(x) <= 4))
raises(NotImplementedError, lambda: Matrix([[1, 0], [1, 1]])**(S(1)/2))
raises(NotImplementedError,
lambda: Matrix([[1, 2, 3], [4, 5, 6], [7, 8, 9]])**(0.5))
raises(IndexError, lambda: eye(3)[5, 2])
raises(IndexError, lambda: eye(3)[2, 5])
M = Matrix(((1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12), (13, 14, 15, 16)))
raises(ValueError, lambda: M.det('method=LU_decomposition()'))
def test_len():
assert len(Matrix()) == 0
assert len(Matrix([[1, 2]])) == len(Matrix([[1], [2]])) == 2
assert len(Matrix(0, 2, lambda i, j: 0)) == \
len(Matrix(2, 0, lambda i, j: 0)) == 0
assert len(Matrix([[0, 1, 2], [3, 4, 5]])) == 6
assert Matrix([1]) == Matrix([[1]])
assert not Matrix()
assert Matrix() == Matrix([])
def test_integrate():
A = Matrix(((1, 4, x), (y, 2, 4), (10, 5, x**2)))
assert A.integrate(x) == \
Matrix(((x, 4*x, x**2/2), (x*y, 2*x, 4*x), (10*x, 5*x, x**3/3)))
assert A.integrate(y) == \
Matrix(((y, 4*y, x*y), (y**2/2, 2*y, 4*y), (10*y, 5*y, y*x**2)))
def test_limit():
A = Matrix(((1, 4, sin(x)/x), (y, 2, 4), (10, 5, x**2 + 1)))
assert A.limit(x, 0) == Matrix(((1, 4, 1), (y, 2, 4), (10, 5, 1)))
def test_diff():
A = Matrix(((1, 4, x), (y, 2, 4), (10, 5, x**2 + 1)))
assert A.diff(x) == Matrix(((0, 0, 1), (0, 0, 0), (0, 0, 2*x)))
assert A.diff(y) == Matrix(((0, 0, 0), (1, 0, 0), (0, 0, 0)))
def test_getattr():
A = Matrix(((1, 4, x), (y, 2, 4), (10, 5, x**2 + 1)))
    raises(AttributeError, lambda: A.nonexistentattribute)
assert getattr(A, 'diff')(x) == Matrix(((0, 0, 1), (0, 0, 0), (0, 0, 2*x)))
def test_hessenberg():
A = Matrix([[3, 4, 1], [2, 4, 5], [0, 1, 2]])
assert A.is_upper_hessenberg
A = A.T
assert A.is_lower_hessenberg
A[0, -1] = 1
assert A.is_lower_hessenberg is False
A = Matrix([[3, 4, 1], [2, 4, 5], [3, 1, 2]])
assert not A.is_upper_hessenberg
def test_cholesky():
raises(NonSquareMatrixError, lambda: Matrix((1, 2)).cholesky())
raises(ValueError, lambda: Matrix(((1, 2), (3, 4))).cholesky())
A = Matrix(((25, 15, -5), (15, 18, 0), (-5, 0, 11)))
assert A.cholesky() * A.cholesky().T == A
assert A.cholesky().is_lower
assert A.cholesky() == Matrix([[5, 0, 0], [3, 3, 0], [-1, 1, 3]])
def test_LDLdecomposition():
raises(NonSquareMatrixError, lambda: Matrix((1, 2)).LDLdecomposition())
raises(ValueError, lambda: Matrix(((1, 2), (3, 4))).LDLdecomposition())
A = Matrix(((25, 15, -5), (15, 18, 0), (-5, 0, 11)))
L, D = A.LDLdecomposition()
assert L * D * L.T == A
assert L.is_lower
assert L == Matrix([[1, 0, 0], [ S(3)/5, 1, 0], [S(-1)/5, S(1)/3, 1]])
assert D.is_diagonal()
assert D == Matrix([[25, 0, 0], [0, 9, 0], [0, 0, 9]])
def test_cholesky_solve():
A = Matrix([[2, 3, 5],
[3, 6, 2],
[8, 3, 6]])
x = Matrix(3, 1, [3, 7, 5])
b = A*x
soln = A.cholesky_solve(b)
assert soln == x
A = Matrix([[0, -1, 2],
[5, 10, 7],
[8, 3, 4]])
x = Matrix(3, 1, [-1, 2, 5])
b = A*x
soln = A.cholesky_solve(b)
assert soln == x
def test_LDLsolve():
A = Matrix([[2, 3, 5],
[3, 6, 2],
[8, 3, 6]])
x = Matrix(3, 1, [3, 7, 5])
b = A*x
soln = A.LDLsolve(b)
assert soln == x
A = Matrix([[0, -1, 2],
[5, 10, 7],
[8, 3, 4]])
x = Matrix(3, 1, [-1, 2, 5])
b = A*x
soln = A.LDLsolve(b)
assert soln == x
def test_lower_triangular_solve():
raises(NonSquareMatrixError,
lambda: Matrix([1, 0]).lower_triangular_solve(Matrix([0, 1])))
raises(ShapeError,
lambda: Matrix([[1, 0], [0, 1]]).lower_triangular_solve(Matrix([1])))
raises(ValueError,
lambda: Matrix([[2, 1], [1, 2]]).lower_triangular_solve(
Matrix([[1, 0], [0, 1]])))
A = Matrix([[1, 0], [0, 1]])
B = Matrix([[x, y], [y, x]])
C = Matrix([[4, 8], [2, 9]])
assert A.lower_triangular_solve(B) == B
assert A.lower_triangular_solve(C) == C
def test_upper_triangular_solve():
raises(NonSquareMatrixError,
lambda: Matrix([1, 0]).upper_triangular_solve(Matrix([0, 1])))
raises(TypeError,
lambda: Matrix([[1, 0], [0, 1]]).upper_triangular_solve(Matrix([1])))
raises(TypeError,
lambda: Matrix([[2, 1], [1, 2]]).upper_triangular_solve(
Matrix([[1, 0], [0, 1]])))
A = Matrix([[1, 0], [0, 1]])
B = Matrix([[x, y], [y, x]])
C = Matrix([[2, 4], [3, 8]])
assert A.upper_triangular_solve(B) == B
assert A.upper_triangular_solve(C) == C
def test_diagonal_solve():
raises(TypeError, lambda: Matrix([1, 1]).diagonal_solve(Matrix([1])))
A = Matrix([[1, 0], [0, 1]])*2
B = Matrix([[x, y], [y, x]])
assert A.diagonal_solve(B) == B/2
def test_matrix_norm():
# Vector Tests
# Test columns and symbols
x = Symbol('x', real=True)
v = Matrix([cos(x), sin(x)])
assert trigsimp(v.norm(2)) == 1
assert v.norm(10) == Pow(cos(x)**10 + sin(x)**10, S(1)/10)
# Test Rows
A = Matrix([[5, Rational(3, 2)]])
assert A.norm() == Pow(25 + Rational(9, 4), S(1)/2)
assert A.norm(oo) == max(A._mat)
assert A.norm(-oo) == min(A._mat)
# Matrix Tests
# Intuitive test
A = Matrix([[1, 1], [1, 1]])
assert A.norm(2) == 2
assert A.norm(-2) == 0
assert A.norm('frobenius') == 2
assert eye(10).norm(2) == eye(10).norm(-2) == 1
# Test with Symbols and more complex entries
A = Matrix([[3, y, y], [x, S(1)/2, -pi]])
assert (A.norm('fro')
== sqrt(S(37)/4 + 2*abs(y)**2 + pi**2 + x**2))
# Check non-square
A = Matrix([[1, 2, -3], [4, 5, Rational(13, 2)]])
assert A.norm(2) == sqrt(S(389)/8 + sqrt(78665)/8)
assert A.norm(-2) == S(0)
assert A.norm('frobenius') == sqrt(389)/2
# Test properties of matrix norms
# http://en.wikipedia.org/wiki/Matrix_norm#Definition
# Two matrices
A = Matrix([[1, 2], [3, 4]])
B = Matrix([[5, 5], [-2, 2]])
C = Matrix([[0, -I], [I, 0]])
D = Matrix([[1, 0], [0, -1]])
L = [A, B, C, D]
alpha = Symbol('alpha', real=True)
for order in ['fro', 2, -2]:
# Zero Check
assert zeros(3).norm(order) == S(0)
# Check Triangle Inequality for all Pairs of Matrices
for X in L:
for Y in L:
assert simplify(X.norm(order) + Y.norm(order) >=
(X + Y).norm(order))
# Scalar multiplication linearity
for M in [A, B, C, D]:
if order in [2, -2]:
# Abs is causing tests to fail when Abs(alpha) is inside a Max
# or Min. The tests produce mathematically true statements that
# are too complex to be simplified well.
continue
try:
assert ((alpha*M).norm(order) ==
abs(alpha) * M.norm(order))
except NotImplementedError:
pass # Some Norms fail on symbolic matrices due to Max issue
# Test Properties of Vector Norms
# http://en.wikipedia.org/wiki/Vector_norm
# Two column vectors
a = Matrix([1, 1 - 1*I, -3])
b = Matrix([S(1)/2, 1*I, 1])
c = Matrix([-1, -1, -1])
d = Matrix([3, 2, I])
e = Matrix([Integer(1e2), Rational(1, 1e2), 1])
L = [a, b, c, d, e]
alpha = Symbol('alpha', real=True)
for order in [1, 2, -1, -2, S.Infinity, S.NegativeInfinity, pi]:
# Zero Check
if order > 0:
assert Matrix([0, 0, 0]).norm(order) == S(0)
# Triangle inequality on all pairs
if order >= 1: # Triangle InEq holds only for these norms
for v in L:
for w in L:
assert simplify(v.norm(order) + w.norm(order) >=
(v + w).norm(order))
# Linear to scalar multiplication
if order in [1, 2, -1, -2, S.Infinity, S.NegativeInfinity]:
            for v in L:
try:
assert simplify((alpha*v).norm(order) -
(abs(alpha) * v.norm(order))) == 0
except NotImplementedError:
pass # Some Norms fail on symbolics due to Max issue
def test_singular_values():
x = Symbol('x', real=True)
A = Matrix([[0, 1*I], [2, 0]])
assert A.singular_values() == [2, 1]
A = eye(3)
A[1, 1] = x
A[2, 2] = 5
vals = A.singular_values()
assert 1 in vals and 5 in vals and abs(x) in vals
A = Matrix([[sin(x), cos(x)], [-cos(x), sin(x)]])
vals = [sv.trigsimp() for sv in A.singular_values()]
assert vals == [S(1), S(1)]
def test_condition_number():
x = Symbol('x', real=True)
A = eye(3)
A[0, 0] = 10
A[2, 2] = S(1)/10
assert A.condition_number() == 100
A[1, 1] = x
assert A.condition_number() == Max(10, Abs(x)) / Min(S(1)/10, Abs(x))
M = Matrix([[cos(x), sin(x)], [-sin(x), cos(x)]])
Mc = M.condition_number()
assert all(Float(1.).epsilon_eq(Mc.subs(x, val).evalf()) for val in
[Rational(1, 5), Rational(1, 2), Rational(1, 10), pi/2, pi, 7*pi/4 ])
def test_equality():
A = Matrix(((1, 2, 3), (4, 5, 6), (7, 8, 9)))
B = Matrix(((9, 8, 7), (6, 5, 4), (3, 2, 1)))
assert A == A[:, :]
assert not A != A[:, :]
assert not A == B
assert A != B
assert A != 10
assert not A == 10
# A SparseMatrix can be equal to a Matrix
C = SparseMatrix(((1, 0, 0), (0, 1, 0), (0, 0, 1)))
D = Matrix(((1, 0, 0), (0, 1, 0), (0, 0, 1)))
assert C == D
assert not C != D
def test_col_join():
assert eye(3).col_join(Matrix([[7, 7, 7]])) == \
Matrix([[1, 0, 0],
[0, 1, 0],
[0, 0, 1],
[7, 7, 7]])
def test_row_insert():
r4 = Matrix([[4, 4, 4]])
for i in range(-4, 5):
l = [1, 0, 0]
l.insert(i, 4)
assert flatten(eye(3).row_insert(i, r4).col(0).tolist()) == l
def test_col_insert():
c4 = Matrix([4, 4, 4])
for i in range(-4, 5):
l = [0, 0, 0]
l.insert(i, 4)
assert flatten(zeros(3).col_insert(i, c4).row(0).tolist()) == l
def test_normalized():
assert Matrix([3, 4]).normalized() == \
Matrix([Rational(3, 5), Rational(4, 5)])
def test_print_nonzero():
assert capture(lambda: eye(3).print_nonzero()) == \
'[X ]\n[ X ]\n[ X]\n'
assert capture(lambda: eye(3).print_nonzero('.')) == \
'[. ]\n[ . ]\n[ .]\n'
def test_zeros_eye():
assert Matrix.eye(3) == eye(3)
assert Matrix.zeros(3) == zeros(3)
assert ones(3, 4) == Matrix(3, 4, [1]*12)
i = Matrix([[1, 0], [0, 1]])
z = Matrix([[0, 0], [0, 0]])
for cls in classes:
m = cls.eye(2)
assert i == m # but m == i will fail if m is immutable
assert i == eye(2, cls=cls)
assert type(m) == cls
m = cls.zeros(2)
assert z == m
assert z == zeros(2, cls=cls)
assert type(m) == cls
def test_is_zero():
assert Matrix().is_zero
assert Matrix([[0, 0], [0, 0]]).is_zero
assert zeros(3, 4).is_zero
assert not eye(3).is_zero
assert Matrix([[x, 0], [0, 0]]).is_zero == None
assert SparseMatrix([[x, 0], [0, 0]]).is_zero == None
assert ImmutableMatrix([[x, 0], [0, 0]]).is_zero == None
assert ImmutableSparseMatrix([[x, 0], [0, 0]]).is_zero == None
assert Matrix([[x, 1], [0, 0]]).is_zero == False
a = Symbol('a', nonzero=True)
assert Matrix([[a, 0], [0, 0]]).is_zero == False
def test_rotation_matrices():
# This tests the rotation matrices by rotating about an axis and back.
theta = pi/3
r3_plus = rot_axis3(theta)
r3_minus = rot_axis3(-theta)
r2_plus = rot_axis2(theta)
r2_minus = rot_axis2(-theta)
r1_plus = rot_axis1(theta)
r1_minus = rot_axis1(-theta)
assert r3_minus*r3_plus*eye(3) == eye(3)
assert r2_minus*r2_plus*eye(3) == eye(3)
assert r1_minus*r1_plus*eye(3) == eye(3)
# Check the correctness of the trace of the rotation matrix
assert r1_plus.trace() == 1 + 2*cos(theta)
assert r2_plus.trace() == 1 + 2*cos(theta)
assert r3_plus.trace() == 1 + 2*cos(theta)
# Check that a rotation with zero angle doesn't change anything.
assert rot_axis1(0) == eye(3)
assert rot_axis2(0) == eye(3)
assert rot_axis3(0) == eye(3)
def test_DeferredVector():
assert str(DeferredVector("vector")[4]) == "vector[4]"
assert sympify(DeferredVector("d")) == DeferredVector("d")
def test_DeferredVector_not_iterable():
assert not iterable(DeferredVector('X'))
def test_DeferredVector_Matrix():
raises(TypeError, lambda: Matrix(DeferredVector("V")))
def test_GramSchmidt():
R = Rational
m1 = Matrix(1, 2, [1, 2])
m2 = Matrix(1, 2, [2, 3])
assert GramSchmidt([m1, m2]) == \
[Matrix(1, 2, [1, 2]), Matrix(1, 2, [R(2)/5, R(-1)/5])]
assert GramSchmidt([m1.T, m2.T]) == \
[Matrix(2, 1, [1, 2]), Matrix(2, 1, [R(2)/5, R(-1)/5])]
# from wikipedia
assert GramSchmidt([Matrix([3, 1]), Matrix([2, 2])], True) == [
Matrix([3*sqrt(10)/10, sqrt(10)/10]),
Matrix([-sqrt(10)/10, 3*sqrt(10)/10])]
def test_casoratian():
assert casoratian([1, 2, 3, 4], 1) == 0
assert casoratian([1, 2, 3, 4], 1, zero=False) == 0
def test_zero_dimension_multiply():
assert (Matrix()*zeros(0, 3)).shape == (0, 3)
assert zeros(3, 0)*zeros(0, 3) == zeros(3, 3)
assert zeros(0, 3)*zeros(3, 0) == Matrix()
def test_slice_issue_2884():
m = Matrix(2, 2, range(4))
assert m[1, :] == Matrix([[2, 3]])
assert m[-1, :] == Matrix([[2, 3]])
assert m[:, 1] == Matrix([[1, 3]]).T
assert m[:, -1] == Matrix([[1, 3]]).T
raises(IndexError, lambda: m[2, :])
raises(IndexError, lambda: m[2, 2])
def test_slice_issue_3401():
assert zeros(0, 3)[:, -1].shape == (0, 1)
assert zeros(3, 0)[0, :] == Matrix(1, 0, [])
def test_copyin():
s = zeros(3, 3)
s[3] = 1
assert s[:, 0] == Matrix([0, 1, 0])
assert s[3] == 1
assert s[3: 4] == [1]
s[1, 1] = 42
assert s[1, 1] == 42
assert s[1, 1:] == Matrix([[42, 0]])
s[1, 1:] = Matrix([[5, 6]])
assert s[1, :] == Matrix([[1, 5, 6]])
s[1, 1:] = [[42, 43]]
assert s[1, :] == Matrix([[1, 42, 43]])
s[0, 0] = 17
assert s[:, :1] == Matrix([17, 1, 0])
s[0, 0] = [1, 1, 1]
assert s[:, 0] == Matrix([1, 1, 1])
s[0, 0] = Matrix([1, 1, 1])
assert s[:, 0] == Matrix([1, 1, 1])
s[0, 0] = SparseMatrix([1, 1, 1])
assert s[:, 0] == Matrix([1, 1, 1])
def test_invertible_check():
# sometimes a singular matrix will have a pivot vector shorter than
# the number of rows in a matrix...
assert Matrix([[1, 2], [1, 2]]).rref() == (Matrix([[1, 2], [0, 0]]), [0])
raises(ValueError, lambda: Matrix([[1, 2], [1, 2]]).inv())
# ... but sometimes it won't, so that is an insufficient test of
# whether something is invertible.
m = Matrix([
[-1, -1, 0],
[ x, 1, 1],
[ 1, x, -1],
])
assert len(m.rref()[1]) == m.rows
# in addition, unless simplify=True in the call to rref, the identity
# matrix will be returned even though m is not invertible
assert m.rref()[0] == eye(3)
assert m.rref(simplify=signsimp)[0] != eye(3)
raises(ValueError, lambda: m.inv(method="ADJ"))
raises(ValueError, lambda: m.inv(method="GE"))
raises(ValueError, lambda: m.inv(method="LU"))
@XFAIL
def test_issue_3959():
x, y = symbols('x, y')
e = x*y
assert e.subs(x, Matrix([3, 5, 3])) == Matrix([3, 5, 3])*y
def test_issue_5964():
assert str(Matrix([[1, 2], [3, 4]])) == 'Matrix([[1, 2], [3, 4]])'
def test_issue_7604():
x, y = symbols(u("x y"))
assert sstr(Matrix([[x, 2*y], [y**2, x + 3]])) == \
'Matrix([\n[ x, 2*y],\n[y**2, x + 3]])'
def test_is_Identity():
assert eye(3).is_Identity
assert eye(3).as_immutable().is_Identity
assert not zeros(3).is_Identity
assert not ones(3).is_Identity
# issue 6242
assert not Matrix([[1, 0, 0]]).is_Identity
# issue 8854
assert SparseMatrix(3,3, {(0,0):1, (1,1):1, (2,2):1}).is_Identity
assert not SparseMatrix(2,3, range(6)).is_Identity
assert not SparseMatrix(3,3, {(0,0):1, (1,1):1}).is_Identity
assert not SparseMatrix(3,3, {(0,0):1, (1,1):1, (2,2):1, (0,1):2, (0,2):3}).is_Identity
def test_dot():
assert ones(1, 3).dot(ones(3, 1)) == 3
assert ones(1, 3).dot([1, 1, 1]) == 3
def test_dual():
B_x, B_y, B_z, E_x, E_y, E_z = symbols(
'B_x B_y B_z E_x E_y E_z', real=True)
F = Matrix((
( 0, E_x, E_y, E_z),
(-E_x, 0, B_z, -B_y),
(-E_y, -B_z, 0, B_x),
(-E_z, B_y, -B_x, 0)
))
Fd = Matrix((
( 0, -B_x, -B_y, -B_z),
(B_x, 0, E_z, -E_y),
(B_y, -E_z, 0, E_x),
(B_z, E_y, -E_x, 0)
))
assert F.dual().equals(Fd)
assert eye(3).dual().equals(zeros(3))
assert F.dual().dual().equals(-F)
def test_anti_symmetric():
assert Matrix([1, 2]).is_anti_symmetric() is False
m = Matrix(3, 3, [0, x**2 + 2*x + 1, y, -(x + 1)**2, 0, x*y, -y, -x*y, 0])
assert m.is_anti_symmetric() is True
assert m.is_anti_symmetric(simplify=False) is False
assert m.is_anti_symmetric(simplify=lambda x: x) is False
# tweak to fail
m[2, 1] = -m[2, 1]
assert m.is_anti_symmetric() is False
# untweak
m[2, 1] = -m[2, 1]
m = m.expand()
assert m.is_anti_symmetric(simplify=False) is True
m[0, 0] = 1
assert m.is_anti_symmetric() is False
def test_normalize_sort_diagonalization():
A = Matrix(((1, 2), (2, 1)))
P, Q = A.diagonalize(normalize=True)
assert P*P.T == P.T*P == eye(P.cols)
P, Q = A.diagonalize(normalize=True, sort=True)
assert P*P.T == P.T*P == eye(P.cols)
assert P*Q*P.inv() == A
def test_issue_5321():
raises(ValueError, lambda: Matrix([[1, 2, 3], Matrix(0, 1, [])]))
def test_issue_5320():
assert Matrix.hstack(eye(2), 2*eye(2)) == Matrix([
[1, 0, 2, 0],
[0, 1, 0, 2]
])
assert Matrix.vstack(eye(2), 2*eye(2)) == Matrix([
[1, 0],
[0, 1],
[2, 0],
[0, 2]
])
cls = SparseMatrix
assert cls.hstack(cls(eye(2)), cls(2*eye(2))) == Matrix([
[1, 0, 2, 0],
[0, 1, 0, 2]
])
def test_cross():
a = [1, 2, 3]
b = [3, 4, 5]
col = Matrix([-2, 4, -2])
row = col.T
def test(M, ans):
assert ans == M
assert type(M) == cls
for cls in classes:
A = cls(a)
B = cls(b)
test(A.cross(B), col)
test(A.cross(B.T), col)
test(A.T.cross(B.T), row)
test(A.T.cross(B), row)
raises(ShapeError, lambda:
Matrix(1, 2, [1, 1]).cross(Matrix(1, 2, [1, 1])))
def test_hash():
for cls in classes[-2:]:
s = set([cls.eye(1), cls.eye(1)])
assert len(s) == 1 and s.pop() == cls.eye(1)
# issue 3979
for cls in classes[:2]:
assert not isinstance(cls.eye(1), collections.Hashable)
@XFAIL
def test_issue_3979():
# when this passes, delete this and change the [1:2]
# to [:2] in the test_hash above for issue 3979
cls = classes[0]
raises(AttributeError, lambda: hash(cls.eye(1)))
def test_adjoint():
dat = [[0, I], [1, 0]]
ans = Matrix([[0, 1], [-I, 0]])
for cls in classes:
assert ans == cls(dat).adjoint()
def test_simplify_immutable():
from sympy import simplify, sin, cos
assert simplify(ImmutableMatrix([[sin(x)**2 + cos(x)**2]])) == \
ImmutableMatrix([[1]])
def test_rank():
from sympy.abc import x
m = Matrix([[1, 2], [x, 1 - 1/x]])
assert m.rank() == 2
n = Matrix(3, 3, range(1, 10))
assert n.rank() == 2
p = zeros(3)
assert p.rank() == 0
def test_replace():
from sympy import symbols, Function, Matrix
F, G = symbols('F, G', cls=Function)
K = Matrix(2, 2, lambda i, j: G(i+j))
M = Matrix(2, 2, lambda i, j: F(i+j))
N = M.replace(F, G)
assert N == K
def test_replace_map():
from sympy import symbols, Function, Matrix
F, G = symbols('F, G', cls=Function)
    K = Matrix(2, 2, [(G(0), {F(0): G(0)}), (G(1), {F(1): G(1)}),
                      (G(1), {F(1): G(1)}), (G(2), {F(2): G(2)})])
M = Matrix(2, 2, lambda i, j: F(i+j))
N = M.replace(F, G, True)
assert N == K
def test_atoms():
from sympy.abc import x
m = Matrix([[1, 2], [x, 1 - 1/x]])
assert m.atoms() == set([S(1),S(2),S(-1), x])
assert m.atoms(Symbol) == set([x])
@slow
def test_pinv():
from sympy.abc import a, b, c, d
# Pseudoinverse of an invertible matrix is the inverse.
A1 = Matrix([[a, b], [c, d]])
assert simplify(A1.pinv()) == simplify(A1.inv())
# Test the four properties of the pseudoinverse for various matrices.
As = [Matrix([[13, 104], [2212, 3], [-3, 5]]),
Matrix([[1, 7, 9], [11, 17, 19]]),
Matrix([a, b])]
for A in As:
A_pinv = A.pinv()
AAp = A * A_pinv
ApA = A_pinv * A
assert simplify(AAp * A) == A
assert simplify(ApA * A_pinv) == A_pinv
assert AAp.H == AAp
assert ApA.H == ApA
def test_pinv_solve():
# Fully determined system (unique result, identical to other solvers).
A = Matrix([[1, 5], [7, 9]])
B = Matrix([12, 13])
assert A.pinv_solve(B) == A.cholesky_solve(B)
assert A.pinv_solve(B) == A.LDLsolve(B)
assert A.pinv_solve(B) == Matrix([sympify('-43/26'), sympify('71/26')])
assert A * A.pinv() * B == B
# Fully determined, with two-dimensional B matrix.
B = Matrix([[12, 13, 14], [15, 16, 17]])
assert A.pinv_solve(B) == A.cholesky_solve(B)
assert A.pinv_solve(B) == A.LDLsolve(B)
assert A.pinv_solve(B) == Matrix([[-33, -37, -41], [69, 75, 81]]) / 26
assert A * A.pinv() * B == B
# Underdetermined system (infinite results).
A = Matrix([[1, 0, 1], [0, 1, 1]])
B = Matrix([5, 7])
solution = A.pinv_solve(B)
w = {}
for s in solution.atoms(Symbol):
# Extract dummy symbols used in the solution.
w[s.name] = s
assert solution == Matrix([[w['w0_0']/3 + w['w1_0']/3 - w['w2_0']/3 + 1],
[w['w0_0']/3 + w['w1_0']/3 - w['w2_0']/3 + 3],
[-w['w0_0']/3 - w['w1_0']/3 + w['w2_0']/3 + 4]])
assert A * A.pinv() * B == B
# Overdetermined system (least squares results).
A = Matrix([[1, 0], [0, 0], [0, 1]])
B = Matrix([3, 2, 1])
assert A.pinv_solve(B) == Matrix([3, 1])
    # Prove that the solution is not exact.
assert A * A.pinv() * B != B
@XFAIL
def test_pinv_rank_deficient():
# Test the four properties of the pseudoinverse for various matrices.
As = [Matrix([[1, 1, 1], [2, 2, 2]]),
Matrix([[1, 0], [0, 0]])]
for A in As:
A_pinv = A.pinv()
AAp = A * A_pinv
ApA = A_pinv * A
assert simplify(AAp * A) == A
assert simplify(ApA * A_pinv) == A_pinv
assert AAp.H == AAp
assert ApA.H == ApA
# Test solving with rank-deficient matrices.
A = Matrix([[1, 0], [0, 0]])
# Exact, non-unique solution.
B = Matrix([3, 0])
solution = A.pinv_solve(B)
w1 = solution.atoms(Symbol).pop()
assert w1.name == 'w1_0'
assert solution == Matrix([3, w1])
assert A * A.pinv() * B == B
# Least squares, non-unique solution.
B = Matrix([3, 1])
solution = A.pinv_solve(B)
w1 = solution.atoms(Symbol).pop()
assert w1.name == 'w1_0'
assert solution == Matrix([3, w1])
assert A * A.pinv() * B != B
def test_issue_7201():
assert ones(0, 1) + ones(0, 1) == Matrix(0, 1, [])
assert ones(1, 0) + ones(1, 0) == Matrix(1, 0, [])
def test_free_symbols():
for M in ImmutableMatrix, ImmutableSparseMatrix, Matrix, SparseMatrix:
assert M([[x], [0]]).free_symbols == set([x])
def test_from_ndarray():
"""See issue 7465."""
try:
from numpy import array
except ImportError:
skip('NumPy must be available to test creating matrices from ndarrays')
assert Matrix(array([1, 2, 3])) == Matrix([1, 2, 3])
assert Matrix(array([[1, 2, 3]])) == Matrix([[1, 2, 3]])
assert Matrix(array([[1, 2, 3], [4, 5, 6]])) == \
Matrix([[1, 2, 3], [4, 5, 6]])
assert Matrix(array([x, y, z])) == Matrix([x, y, z])
raises(NotImplementedError, lambda: Matrix(array([[
[1, 2], [3, 4]], [[5, 6], [7, 8]]])))
def test_hermitian():
a = Matrix([[1, I], [-I, 1]])
assert a.is_hermitian
a[0, 0] = 2*I
assert a.is_hermitian is False
a[0, 0] = x
assert a.is_hermitian is None
a[0, 1] = a[1, 0]*I
assert a.is_hermitian is False
| Sumith1896/sympy | sympy/matrices/tests/test_matrices.py | Python | bsd-3-clause | 73,441 |
try:
from django.utils import timezone as datetime
except ImportError:
from datetime import datetime
from django.contrib.auth.models import Group
from django.contrib.sites.models import Site
from django.db import models
from waffle.compat import User
class Flag(models.Model):
"""A feature flag.
Flags are active (or not) on a per-request basis.
"""
name = models.CharField(max_length=100,
help_text='The human/computer readable name.')
everyone = models.NullBooleanField(blank=True, help_text=(
'Flip this flag on (Yes) or off (No) for everyone, overriding all '
'other settings. Leave as Unknown to use normally.'))
percent = models.DecimalField(max_digits=3, decimal_places=1, null=True,
blank=True, help_text=(
'A number between 0.0 and 99.9 to indicate a percentage of users for '
'whom this flag will be active.'))
testing = models.BooleanField(default=False, help_text=(
'Allow this flag to be set for a session for user testing.'))
superusers = models.BooleanField(default=True, help_text=(
'Flag always active for superusers?'))
staff = models.BooleanField(default=False, help_text=(
'Flag always active for staff?'))
authenticated = models.BooleanField(default=False, help_text=(
        'Flag always active for authenticated users?'))
languages = models.TextField(blank=True, default='', help_text=(
'Activate this flag for users with one of these languages (comma '
'separated list)'))
groups = models.ManyToManyField(Group, blank=True, help_text=(
'Activate this flag for these user groups.'))
users = models.ManyToManyField(User, blank=True, help_text=(
'Activate this flag for these users.'))
rollout = models.BooleanField(default=False, help_text=(
'Activate roll-out mode?'))
note = models.TextField(blank=True, help_text=(
'Note where this Flag is used.'))
created = models.DateTimeField(default=datetime.now, db_index=True,
help_text=('Date when this Flag was created.'))
modified = models.DateTimeField(default=datetime.now, help_text=(
'Date when this Flag was last modified.'))
site = models.ForeignKey(Site, blank=True, null=True, related_name='waffle_flags')
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
self.modified = datetime.now()
super(Flag, self).save(*args, **kwargs)
class Meta:
unique_together = ('name', 'site')
class Switch(models.Model):
"""A feature switch.
Switches are active, or inactive, globally.
"""
name = models.CharField(max_length=100,
help_text='The human/computer readable name.')
active = models.BooleanField(default=False, help_text=(
'Is this flag active?'))
note = models.TextField(blank=True, help_text=(
'Note where this Switch is used.'))
created = models.DateTimeField(default=datetime.now, db_index=True,
help_text=('Date when this Switch was created.'))
modified = models.DateTimeField(default=datetime.now, help_text=(
'Date when this Switch was last modified.'))
site = models.ForeignKey(Site, blank=True, null=True, related_name='waffle_switches')
def __unicode__(self):
return u'%s: %s' % (self.name, 'on' if self.active else 'off')
def save(self, *args, **kwargs):
self.modified = datetime.now()
super(Switch, self).save(*args, **kwargs)
class Meta:
verbose_name_plural = 'Switches'
unique_together = ('name', 'site')
class Sample(models.Model):
"""A sample is true some percentage of the time, but is not connected
to users or requests.
"""
name = models.CharField(max_length=100,
help_text='The human/computer readable name.')
percent = models.DecimalField(max_digits=4, decimal_places=1, help_text=(
'A number between 0.0 and 100.0 to indicate a percentage of the time '
'this sample will be active.'))
note = models.TextField(blank=True, help_text=(
'Note where this Sample is used.'))
created = models.DateTimeField(default=datetime.now, db_index=True,
help_text=('Date when this Sample was created.'))
modified = models.DateTimeField(default=datetime.now, help_text=(
'Date when this Sample was last modified.'))
site = models.ForeignKey(Site, blank=True, null=True, related_name='waffle_samples')
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
self.modified = datetime.now()
super(Sample, self).save(*args, **kwargs)
class Meta:
unique_together = ('name', 'site')
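# Typical usage sketch (flag/switch/sample names are illustrative; the helper
# functions live in the top-level waffle package):
#     import waffle
#     if waffle.flag_is_active(request, 'new-dashboard'):
#         ...  # behaviour gated per-request by a Flag
#     if waffle.switch_is_active('beta-search'):
#         ...  # behaviour gated globally by a Switch
#     if waffle.sample_is_active('log-timings'):
#         ...  # behaviour active a percentage of the time via a Sample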
| TwigWorld/django-waffle | waffle/models.py | Python | bsd-3-clause | 4,814 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__doc__ = """Code by Benjamin S. Murphy
[email protected]
Dependencies:
numpy
scipy (scipy.optimize.minimize())
Functions:
_adjust_for_anisotropy(X, center, scaling, angle):
Returns X_adj array of adjusted data coordinates. Angles are CCW about
specified axes. Scaling is applied in rotated coordinate system.
initialize_variogram_model(x, y, z, variogram_model, variogram_model_parameters,
    variogram_function, nlags, weight, coordinates_type):
Returns lags, semivariance, and variogram model parameters as a list.
initialize_variogram_model_3d(x, y, z, values, variogram_model,
    variogram_model_parameters, variogram_function, nlags, weight):
Returns lags, semivariance, and variogram model parameters as a list.
variogram_function_error(params, x, y, variogram_function):
Called by calculate_variogram_model.
calculate_variogram_model(lags, semivariance, variogram_model, variogram_function):
Returns variogram model parameters that minimize the RMSE between the specified
variogram function and the actual calculated variogram points.
krige(x, y, z, coords, variogram_function, variogram_model_parameters, coordinates_type):
Function that solves the ordinary kriging system for a single specified point.
Returns the Z value and sigma squared for the specified coordinates.
krige_3d(x, y, z, vals, coords, variogram_function, variogram_model_parameters):
Function that solves the ordinary kriging system for a single specified point.
Returns the interpolated value and sigma squared for the specified coordinates.
find_statistics(x, y, z, variogram_function, variogram_model_parameters, coordinates_type):
Returns the delta, sigma, and epsilon values for the variogram fit.
calcQ1(epsilon):
Returns the Q1 statistic for the variogram fit (see Kitanidis).
calcQ2(epsilon):
Returns the Q2 statistic for the variogram fit (see Kitanidis).
calc_cR(Q2, sigma):
Returns the cR statistic for the variogram fit (see Kitanidis).
great_circle_distance(lon1, lat1, lon2, lat2):
Returns the great circle distance between two arrays of points given in spherical
coordinates. Spherical coordinates are expected in degrees. Angle definition
follows standard longitude/latitude definition.
References:
[1] P.K. Kitanidis, Introduction to Geostatistics: Applications in Hydrogeology,
(Cambridge University Press, 1997) 272 p.
[2] T. Vincenty, Direct and Inverse Solutions of Geodesics on the Ellipsoid
with Application of Nested Equations, Survey Review 23 (176),
(Directorate of Overseas Survey, Kingston Road, Tolworth, Surrey 1975)
Copyright (c) 2015 Benjamin S. Murphy
"""
import numpy as np
from scipy.optimize import minimize
def great_circle_distance(lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between one or multiple
pairs of points on a unit sphere.
Parameters:
-----------
lon1: float scalar or numpy array
Longitude coordinate(s) of the first element(s) of the point
pair(s), given in degrees.
lat1: float scalar or numpy array
Latitude coordinate(s) of the first element(s) of the point
pair(s), given in degrees.
lon2: float scalar or numpy array
Longitude coordinate(s) of the second element(s) of the point
pair(s), given in degrees.
lat2: float scalar or numpy array
Latitude coordinate(s) of the second element(s) of the point
pair(s), given in degrees.
Calculation of distances follows numpy elementwise semantics, so if
an array of length N is passed, all input parameters need to be
arrays of length N or scalars.
Returns:
--------
distance: float
The great circle distance(s) (in degrees) between the
given pair(s) of points.
"""
# Convert to radians:
lat1 = np.array(lat1)*np.pi/180.0
lat2 = np.array(lat2)*np.pi/180.0
dlon = (lon1-lon2)*np.pi/180.0
# Evaluate trigonometric functions that need to be evaluated more
# than once:
c1 = np.cos(lat1)
s1 = np.sin(lat1)
c2 = np.cos(lat2)
s2 = np.sin(lat2)
cd = np.cos(dlon)
# This uses the arctan version of the great-circle distance function
# from en.wikipedia.org/wiki/Great-circle_distance for increased
# numerical stability.
# Formula can be obtained from [2] combining eqns. (14)-(16)
# for spherical geometry (f=0).
return 180.0/np.pi*np.arctan2(
np.sqrt((c2*np.sin(dlon))**2 +
(c1*s2-s1*c2*cd)**2),
s1*s2+c1*c2*cd)
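# Sanity checks (illustrative, not part of the original module):
#     great_circle_distance(0.0, 0.0, 90.0, 0.0)    # -> 90.0, a quarter of the equator
#     great_circle_distance(0.0, -90.0, 0.0, 90.0)  # -> 180.0, pole to pole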
def euclid3_to_great_circle(euclid3_distance):
"""
Convert euclidean distance between points on a unit sphere to
the corresponding great circle distance.
Parameters:
-----------
euclid3_distance: float scalar or numpy array
The euclidean three-space distance(s) between points on a
unit sphere, thus between [0,2].
Returns:
--------
great_circle_dist: float scalar or numpy array
The corresponding great circle distance(s) between the
points.
"""
    # Eliminate some possible numerical errors; np.minimum also copes with
    # scalar inputs and avoids mutating the caller's array:
    euclid3_distance = np.minimum(euclid3_distance, 2.0)
return 180.0 - 360.0/np.pi*np.arccos(0.5*euclid3_distance)
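# Sanity checks (illustrative): a chord of length 2 joins antipodal points and
# a chord of length sqrt(2) subtends a right angle at the sphere's centre.
#     euclid3_to_great_circle(np.array([2.0, np.sqrt(2.0)]))  # -> array([ 180., 90.])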
def _adjust_for_anisotropy(X, center, scaling, angle):
"""Adjusts data coordinates to take into account anisotropy.
Can also be used to take into account data scaling.
Parameters
----------
X: ndarray
float array [n_samples, n_dim], the input array of coordinates
center: ndarray
float array [n_dim], the coordinate of centers
scaling: ndarray
float array [n_dim - 1], the scaling of last two dimensions
angle : ndarray
        float array [2*n_dim - 3], the anisotropy angle (degrees)
Returns
-------
X_adj : ndarray
float array [n_samples, n_dim], the X array adjusted for anisotropy.
"""
center = np.asarray(center)[None, :]
angle = np.asarray(angle)*np.pi/180
    X = X - center  # work on a copy; don't mutate the caller's array
Ndim = X.shape[1]
if Ndim == 1:
        raise NotImplementedError('Not implemented yet')
elif Ndim == 2:
stretch = np.array([[1, 0], [0, scaling[0]]])
rot_tot = np.array([[np.cos(-angle[0]), -np.sin(-angle[0])],
[np.sin(-angle[0]), np.cos(-angle[0])]])
elif Ndim == 3:
stretch = np.array([[1., 0., 0.], [0., scaling[0], 0.], [0., 0., scaling[1]]])
rotate_x = np.array([[1., 0., 0.],
[0., np.cos(-angle[0]), -np.sin(-angle[0])],
[0., np.sin(-angle[0]), np.cos(-angle[0])]])
rotate_y = np.array([[np.cos(-angle[1]), 0., np.sin(-angle[1])],
[0., 1., 0.],
[-np.sin(-angle[1]), 0., np.cos(-angle[1])]])
rotate_z = np.array([[np.cos(-angle[2]), -np.sin(-angle[2]), 0.],
[np.sin(-angle[2]), np.cos(-angle[2]), 0.],
[0., 0., 1.]])
rot_tot = np.dot(rotate_z, np.dot(rotate_y, rotate_x))
else:
        raise ValueError("Adjust for anisotropy function doesn't support ND spaces where N>3")
X_adj = np.dot(stretch, np.dot(rot_tot, X.T)).T
X_adj += center
return X_adj
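# Example (illustrative): scale the second axis by 2 with no rotation, so the
# point (1, 1) maps to (1, 2) about the origin.
#     _adjust_for_anisotropy(np.array([[1.0, 1.0]]), [0.0, 0.0], [2.0], [0.0])
#     # -> array([[ 1., 2.]])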
def initialize_variogram_model(x, y, z, variogram_model, variogram_model_parameters,
variogram_function, nlags, weight, coordinates_type):
"""Initializes the variogram model for kriging according
to user specifications or to defaults"""
x1, x2 = np.meshgrid(x, x, sparse=True)
y1, y2 = np.meshgrid(y, y, sparse=True)
z1, z2 = np.meshgrid(z, z, sparse=True)
dz = z1 - z2
if coordinates_type == 'euclidean':
dx = x1 - x2
dy = y1 - y2
d = np.sqrt(dx**2 + dy**2)
elif coordinates_type == 'geographic':
# Assume x => lon, y => lat
d = great_circle_distance(x1, y1, x2, y2)
g = 0.5 * dz**2
indices = np.indices(d.shape)
d = d[(indices[0, :, :] > indices[1, :, :])]
g = g[(indices[0, :, :] > indices[1, :, :])]
# Equal-sized bins are now implemented. The upper limit on the bins
# is appended to the list (instead of calculated as part of the
# list comprehension) to avoid any numerical oddities
# (specifically, say, ending up as 0.99999999999999 instead of 1.0).
# Appending dmax + 0.001 ensures that the largest distance value
# is included in the semivariogram calculation.
dmax = np.amax(d)
dmin = np.amin(d)
dd = (dmax - dmin)/nlags
bins = [dmin + n*dd for n in range(nlags)]
dmax += 0.001
bins.append(dmax)
# This old binning method was experimental and doesn't seem
# to work too well. Bins were computed such that there are more
# at shorter lags. This effectively weights smaller distances more
# highly in determining the variogram. As Kitanidis points out,
# the variogram fit to the data at smaller lag distances is more
# important. However, the value at the largest lag probably ends up
# being biased too high for the larger values and thereby throws off
# automatic variogram calculation and confuses comparison of the
# semivariogram with the variogram model.
#
# dmax = np.amax(d)
# dmin = np.amin(d)
# dd = dmax - dmin
# bins = [dd*(0.5**n) + dmin for n in range(nlags, 1, -1)]
# bins.insert(0, dmin)
# bins.append(dmax)
lags = np.zeros(nlags)
semivariance = np.zeros(nlags)
for n in range(nlags):
# This 'if... else...' statement ensures that there are data
# in the bin so that numpy can actually find the mean. If we
# don't test this first, then Python kicks out an annoying warning
# message when there is an empty bin and we try to calculate the mean.
if d[(d >= bins[n]) & (d < bins[n + 1])].size > 0:
lags[n] = np.mean(d[(d >= bins[n]) & (d < bins[n + 1])])
semivariance[n] = np.mean(g[(d >= bins[n]) & (d < bins[n + 1])])
else:
lags[n] = np.nan
semivariance[n] = np.nan
lags = lags[~np.isnan(semivariance)]
semivariance = semivariance[~np.isnan(semivariance)]
if variogram_model_parameters is not None:
if variogram_model == 'linear' and len(variogram_model_parameters) != 2:
raise ValueError("Exactly two parameters required "
"for linear variogram model")
elif (variogram_model == 'power' or variogram_model == 'spherical' or variogram_model == 'exponential'
or variogram_model == 'gaussian') and len(variogram_model_parameters) != 3:
raise ValueError("Exactly three parameters required "
"for %s variogram model" % variogram_model)
else:
if variogram_model == 'custom':
raise ValueError("Variogram parameters must be specified when implementing custom variogram model.")
else:
variogram_model_parameters = calculate_variogram_model(lags, semivariance, variogram_model,
variogram_function, weight)
return lags, semivariance, variogram_model_parameters
def initialize_variogram_model_3d(x, y, z, values, variogram_model, variogram_model_parameters,
variogram_function, nlags, weight):
"""Initializes the variogram model for kriging according
to user specifications or to defaults"""
x1, x2 = np.meshgrid(x, x, sparse=True)
y1, y2 = np.meshgrid(y, y, sparse=True)
z1, z2 = np.meshgrid(z, z, sparse=True)
val1, val2 = np.meshgrid(values, values)
d = np.sqrt((x1 - x2)**2 + (y1 - y2)**2 + (z1 - z2)**2)
g = 0.5 * (val1 - val2)**2
indices = np.indices(d.shape)
d = d[(indices[0, :, :] > indices[1, :, :])]
g = g[(indices[0, :, :] > indices[1, :, :])]
# The upper limit on the bins is appended to the list (instead of calculated as part of the
# list comprehension) to avoid any numerical oddities (specifically, say, ending up as
# 0.99999999999999 instead of 1.0). Appending dmax + 0.001 ensures that the largest distance value
# is included in the semivariogram calculation.
dmax = np.amax(d)
dmin = np.amin(d)
dd = (dmax - dmin)/nlags
bins = [dmin + n*dd for n in range(nlags)]
dmax += 0.001
bins.append(dmax)
lags = np.zeros(nlags)
semivariance = np.zeros(nlags)
for n in range(nlags):
# This 'if... else...' statement ensures that there are data in the bin so that numpy can actually
# find the mean. If we don't test this first, then Python kicks out an annoying warning message
# when there is an empty bin and we try to calculate the mean.
if d[(d >= bins[n]) & (d < bins[n + 1])].size > 0:
lags[n] = np.mean(d[(d >= bins[n]) & (d < bins[n + 1])])
semivariance[n] = np.mean(g[(d >= bins[n]) & (d < bins[n + 1])])
else:
lags[n] = np.nan
semivariance[n] = np.nan
lags = lags[~np.isnan(semivariance)]
semivariance = semivariance[~np.isnan(semivariance)]
if variogram_model_parameters is not None:
if variogram_model == 'linear' and len(variogram_model_parameters) != 2:
raise ValueError("Exactly two parameters required "
"for linear variogram model")
elif (variogram_model == 'power' or variogram_model == 'spherical' or variogram_model == 'exponential'
or variogram_model == 'gaussian') and len(variogram_model_parameters) != 3:
raise ValueError("Exactly three parameters required "
"for %s variogram model" % variogram_model)
else:
if variogram_model == 'custom':
raise ValueError("Variogram parameters must be specified when implementing custom variogram model.")
else:
variogram_model_parameters = calculate_variogram_model(lags, semivariance, variogram_model,
variogram_function, weight)
return lags, semivariance, variogram_model_parameters
def variogram_function_error(params, x, y, variogram_function, weight):
    """Function used in fitting the variogram model.
    Returns the RMSE between the calculated fit and the actual data."""
diff = variogram_function(params, x) - y
if weight:
weights = np.arange(x.size, 0.0, -1.0)
weights /= np.sum(weights)
rmse = np.sqrt(np.average(diff**2, weights=weights))
else:
rmse = np.sqrt(np.mean(diff**2))
return rmse
def calculate_variogram_model(lags, semivariance, variogram_model, variogram_function, weight):
"""Function that fits a variogram model when parameters are not specified."""
if variogram_model == 'linear':
x0 = [(np.amax(semivariance) - np.amin(semivariance))/(np.amax(lags) - np.amin(lags)),
np.amin(semivariance)]
bnds = ((0.0, 1000000000.0), (0.0, np.amax(semivariance)))
elif variogram_model == 'power':
x0 = [(np.amax(semivariance) - np.amin(semivariance))/(np.amax(lags) - np.amin(lags)),
1.1, np.amin(semivariance)]
bnds = ((0.0, 1000000000.0), (0.01, 1.99), (0.0, np.amax(semivariance)))
else:
x0 = [np.amax(semivariance), 0.5*np.amax(lags), np.amin(semivariance)]
bnds = ((0.0, 10*np.amax(semivariance)), (0.0, np.amax(lags)), (0.0, np.amax(semivariance)))
res = minimize(variogram_function_error, x0, args=(lags, semivariance, variogram_function, weight),
method='SLSQP', bounds=bnds)
return res.x
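# Fitting sketch (illustrative; assumes a variogram function with the same
# (params, distance) calling convention used throughout this module):
#     lags = np.array([1.0, 2.0, 3.0, 4.0])
#     semivariance = np.array([0.5, 1.0, 1.5, 2.0])
#     def linear_model(params, d):
#         return params[0]*d + params[1]  # slope, nugget
#     calculate_variogram_model(lags, semivariance, 'linear', linear_model, False)
#     # -> roughly array([0.5, 0.0]): slope ~0.5, nugget ~0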
def krige(x, y, z, coords, variogram_function, variogram_model_parameters, coordinates_type):
"""Sets up and solves the kriging matrix for the given coordinate pair.
This function is now only used for the statistics calculations."""
zero_index = None
zero_value = False
x1, x2 = np.meshgrid(x, x, sparse=True)
y1, y2 = np.meshgrid(y, y, sparse=True)
if coordinates_type == 'euclidean':
d = np.sqrt((x1 - x2)**2 + (y1 - y2)**2)
bd = np.sqrt((x - coords[0])**2 + (y - coords[1])**2)
elif coordinates_type == 'geographic':
d = great_circle_distance(x1, y1, x2, y2)
bd = great_circle_distance(x, y, coords[0]*np.ones(x.shape),
coords[1]*np.ones(y.shape))
if np.any(np.absolute(bd) <= 1e-10):
zero_value = True
zero_index = np.where(bd <= 1e-10)[0][0]
n = x.shape[0]
a = np.zeros((n+1, n+1))
a[:n, :n] = - variogram_function(variogram_model_parameters, d)
np.fill_diagonal(a, 0.0)
a[n, :] = 1.0
a[:, n] = 1.0
a[n, n] = 0.0
b = np.zeros((n+1, 1))
b[:n, 0] = - variogram_function(variogram_model_parameters, bd)
if zero_value:
b[zero_index, 0] = 0.0
b[n, 0] = 1.0
x_ = np.linalg.solve(a, b)
zinterp = np.sum(x_[:n, 0] * z)
sigmasq = np.sum(x_[:, 0] * -b[:, 0])
return zinterp, sigmasq
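# The linear system assembled in krige() above is the standard ordinary
# kriging system (cf. Kitanidis [1]). With gamma the variogram model, it solves
#
#     [ -gamma(d_ij)  1 ] [ lambda ]   [ -gamma(d_i0) ]
#     [     1^T       0 ] [   mu   ] = [       1      ]
#
# for the weights lambda and the Lagrange multiplier mu; the estimate is
# sum(lambda_i * z_i) and the kriging variance is -b . x_, as computed above.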
def krige_3d(x, y, z, vals, coords, variogram_function, variogram_model_parameters):
"""Sets up and solves the kriging matrix for the given coordinate pair.
This function is now only used for the statistics calculations."""
zero_index = None
zero_value = False
x1, x2 = np.meshgrid(x, x, sparse=True)
y1, y2 = np.meshgrid(y, y, sparse=True)
z1, z2 = np.meshgrid(z, z, sparse=True)
d = np.sqrt((x1 - x2)**2 + (y1 - y2)**2 + (z1 - z2)**2)
bd = np.sqrt((x - coords[0])**2 + (y - coords[1])**2 + (z - coords[2])**2)
if np.any(np.absolute(bd) <= 1e-10):
zero_value = True
zero_index = np.where(bd <= 1e-10)[0][0]
n = x.shape[0]
a = np.zeros((n+1, n+1))
a[:n, :n] = - variogram_function(variogram_model_parameters, d)
np.fill_diagonal(a, 0.0)
a[n, :] = 1.0
a[:, n] = 1.0
a[n, n] = 0.0
b = np.zeros((n+1, 1))
b[:n, 0] = - variogram_function(variogram_model_parameters, bd)
if zero_value:
b[zero_index, 0] = 0.0
b[n, 0] = 1.0
x_ = np.linalg.solve(a, b)
zinterp = np.sum(x_[:n, 0] * vals)
sigmasq = np.sum(x_[:, 0] * -b[:, 0])
return zinterp, sigmasq
def find_statistics(x, y, z, variogram_function, variogram_model_parameters, coordinates_type):
"""Calculates variogram fit statistics."""
delta = np.zeros(z.shape)
sigma = np.zeros(z.shape)
for n in range(z.shape[0]):
if n == 0:
delta[n] = 0.0
sigma[n] = 0.0
else:
z_, ss_ = krige(x[:n], y[:n], z[:n], (x[n], y[n]), variogram_function,
variogram_model_parameters, coordinates_type)
d = z[n] - z_
delta[n] = d
sigma[n] = np.sqrt(ss_)
delta = delta[1:]
sigma = sigma[1:]
epsilon = delta/sigma
return delta, sigma, epsilon
def find_statistics_3d(x, y, z, vals, variogram_function, variogram_model_parameters):
"""Calculates variogram fit statistics for 3D problems."""
delta = np.zeros(vals.shape)
sigma = np.zeros(vals.shape)
for n in range(z.shape[0]):
if n == 0:
delta[n] = 0.0
sigma[n] = 0.0
else:
val_, ss_ = krige_3d(x[:n], y[:n], z[:n], vals[:n], (x[n], y[n], z[n]),
variogram_function, variogram_model_parameters)
delta[n] = vals[n] - val_
sigma[n] = np.sqrt(ss_)
delta = delta[1:]
sigma = sigma[1:]
epsilon = delta/sigma
return delta, sigma, epsilon
def calcQ1(epsilon):
return abs(np.sum(epsilon)/(epsilon.shape[0] - 1))
def calcQ2(epsilon):
return np.sum(epsilon**2)/(epsilon.shape[0] - 1)
def calc_cR(Q2, sigma):
return Q2 * np.exp(np.sum(np.log(sigma**2))/sigma.shape[0])
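# For reference (summary added; formulas match the code above): with the
# orthonormal residuals epsilon_k from find_statistics, Kitanidis [1] defines
#     Q1 = |sum_k epsilon_k| / (n - 1)      # ~0 for an unbiased fit
#     Q2 = sum_k epsilon_k**2 / (n - 1)     # ~1 for a well-scaled fit
#     cR = Q2 * exp(mean(log(sigma_k**2)))  # Q2 scaled by the geometric mean variance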
| basaks/PyKrige | pykrige/core.py | Python | bsd-3-clause | 20,431 |
#!/usr/bin/env python2.7
# NOTE THIS NEEDS 2.6 as parser breaks with 2.5 :-)
import warnings
warnings.simplefilter("ignore",DeprecationWarning)
import os, sys, re, urllib2, string, socket
import htmlentitydefs
import mechanize
import html5lib
from html5lib import treebuilders
import lxml.html, lxml.etree
from lxml.cssselect import CSSSelector
socket.setdefaulttimeout(15)
class ParseException(Exception):
pass
##
# Removes HTML or XML character references and entities from a text string.
#
# @param text The HTML (or XML) source text.
# @return The plain text, as a UTF-8 encoded string.
def unescape(text):
def fixup(m):
text = m.group(0)
if text[:2] == "&#":
# character reference
try:
if text[:3] == "&#x":
return unichr(int(text[3:-1], 16))
else:
return unichr(int(text[2:-1]))
except ValueError:
pass
else:
# named entity
try:
text = unichr(htmlentitydefs.name2codepoint[text[1:-1]])
except KeyError:
pass
return text # leave as is
	return re.sub(r"&#?\w+;", fixup, text).encode('utf-8')
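# Example (illustrative):
#     unescape("Smith &amp; Jones &#8211; r&eacute;sum&eacute;")
#     # -> 'Smith & Jones \xe2\x80\x93 r\xc3\xa9sum\xc3\xa9' (UTF-8 bytes)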
#
# Strip off any institutional proxies we find
#
def canon_url(url):
# print "xxxxx url = %s" % url
m = re.match(r'http://[^/]*sciencedirect.com[^/]*/(science(\?_ob|/article).*$)', url)
if not m:
raise ParseException, "bad source url"
return "http://www.sciencedirect.com/" + m.group(1)
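# Example of proxy stripping (hostname is made up):
#     canon_url("http://www.sciencedirect.com.ezproxy.example.edu"
#               "/science/article/pii/S0123456789")
#     # -> 'http://www.sciencedirect.com/science/article/pii/S0123456789'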
#
# Make up crossref metadata URL (just need the DOI)
#
def crossref_xml_url(doi):
url = "http://www.crossref.org/openurl/?id=doi:" + doi
url += "&noredirect=true"
# see http://www.crossref.org/help/Content/05_Interfacing_with_the_CrossRef_system/Using_the_Open_URL_Resolver.htm
# key is either "username:password" or "<email>"
key_file = os.environ.get("HOME") + "/.crossref-key"
if os.path.exists(key_file):
f = open(key_file)
key = f.read().strip()
f.close()
url += "&pid=" + key
url += "&format=unixref"
return url
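# The resulting URL looks like this (DOI is illustrative; the pid parameter is
# appended only when ~/.crossref-key exists):
#     http://www.crossref.org/openurl/?id=doi:10.1016/j.foo.2010.01.001&noredirect=true&format=unixref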
#
# Try, by foul trickery, to get an abstract
# We're looking for HTML like this:
# <div class="articleText" style="display: inline;">
# <h3 class="h3">Abstract</h3>
# <p>An instrumented indentation technique...
#
def scrape_abstract(page):
root = lxml.html.fromstring(page)
#root = lxml.html.fromstring(html_data)
#links_lxml_res = root.cssselect("a.detailsViewLink")
#links_lxml = [link.get("href") for link in links_lxml_res]
#links_lxml = list(set(links_lxml))
    paras = []
    for div in root.cssselect("div.articleText"):
        for h3 in div.cssselect("h3.h3"):
            if h3.text and string.lower(h3.text) in ('abstract','summary'):
                for p in div.cssselect("p"):
                    paras.append(p.xpath("string()"))
    if len(paras) == 0:
        for div in root.cssselect('div.svAbstract'):
            for p in div.cssselect("p"):
                paras.append(p.xpath("string()"))
    if len(paras) == 0:
        for div in root.cssselect('#articleContent'):
            for p in div.cssselect("div.articleText_indent"):
                paras.append(p.xpath("string()"))
    abstract = ' '.join(paras)
abstract = re.sub('\n+',' ',abstract)
abstract = re.sub('\s+',' ',abstract)
# print "1================================================================="
# print abstract
# print "2================================================================="
return unescape(abstract)
#
# Just try to fetch the metadata from crossref
#
def handle(url):
cUrl = canon_url(url)
#print "%s => %s" % (url, cUrl)
cookies = mechanize.CookieJar()
browser = mechanize.Browser()
browser.addheaders = [("User-Agent", "Mozilla/5.0 (compatible; citeulike/1.0)"),
("From", "[email protected]")]
#browser.add_handler(PrettifyHandler())
browser.set_handle_robots(False)
browser.set_debug_http(False)
browser.set_debug_redirects(False)
browser.open(cUrl)
response = browser.response()
page = response.get_data()
# print page
#
# Elsevier insist on user selecting a "preferred source" when the article is
# available. This is normally stored in a cookie.
# If we get directed to the Elsevier "linking hub", find the 1st SD link on the
# page and follow that.
# Yeah, I know - rubbish.
#
huburl = browser.geturl()
doi = None
m = re.search(r'linkinghub.elsevier.com/', huburl)
if m:
root = lxml.html.fromstring(page)
inputs = root.cssselect("input")
hrefs = [link.get("value") for link in inputs]
for href in hrefs:
n = re.search('sciencedirect.com',href)
if n:
browser.open(href)
response = browser.response()
page = response.get_data()
break
m = re.search(r'<a(?: id="[^"]+")? href="http://dx.doi.org/([^"]+)"', page)
# this page might requires a login. Luckily there seems to be a
# link "View Abstract" which can take us to a page we can read
if not m and not doi:
root = lxml.html.fromstring(page)
links = root.cssselect("a")
for href in [e.get("href") for e in links]:
if href:
m = re.search(r'http://dx.doi.org/([^"]+)', href)
if m:
break
if False:
parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder("beautifulsoup"))
# print page
soup = parser.parse(page)
link = soup.find(text=re.compile(r"view abstract", re.I))
if link:
href = link.parent['href']
browser.open(href)
response = browser.response()
page = response.get_data()
m = re.search(r'<a(?: id="[^"]+")? href="http://dx.doi.org/([^"]+)"', page)
if m:
doi = m.group(1)
else:
root = lxml.html.fromstring(page)
doi_nodes = root.cssselect("#doi")
for n in [e.text for e in doi_nodes]:
doi = re.sub(r'doi:','',n)
break
if not doi:
m = re.search(r'/doi/(10\.\d\d\d\d)_([^/]+)/', page)
if m:
doi = "%s/%s" % (m.group(1), m.group(2))
if not doi:
raise ParseException, "Cannot find DOI in page"
# if not re.search(r'^10[.](1016|1006|1053)/',doi):
# raise ParseException, "Cannot find an Elsevier DOI (10.1006, 10.1016, 10.1053) DOI"
xml_url = crossref_xml_url(doi)
browser.open(xml_url)
response = browser.response()
xml_page = response.get_data()
xml_page = xml_page.decode('utf-8')
# Get rid of extraneous "stars" \u2606. Sometimes at end of title (hopefully
# they're never meant to be "real" elsewhere...)
xml_page = xml_page.replace(u'\u2606',' ')
m = re.search("not found in CrossRef", xml_page)
if m:
raise ParseException, "Unable to locate that DOI (%s) in crossref" % doi
yield "begin_tsv"
yield "use_crossref\t1"
yield "linkout\tDOI\t\t%s\t\t" % doi
abstract = scrape_abstract(page)
# try:
# abstract = scrape_abstract(page)
# except:
# abstract = ''
if abstract:
print "abstract\t%s" % (abstract)
yield "end_tsv"
yield "status\tok"
if __name__ == "__main__":
url = sys.stdin.readline().strip()
try:
for line in handle(url):
print line.encode("utf-8")
except Exception, e:
import traceback
line = traceback.tb_lineno(sys.exc_info()[2])
print "\t".join(["status", "error", "There was an internal error processing this request. Please report this to [email protected] quoting error code %d." % line])
raise
| OAButton/tricorder | plugins/python/sciencedirect.py | Python | bsd-3-clause | 7,168 |
# -*- coding: utf-8 -*-
''':class:`blade` mathing operations.'''
from math import fsum
from itertools import tee
from operator import truediv
from collections import deque, namedtuple
from stuf.six import next
from stuf.iterable import count
from stuf.collects import Counter
from .xslice import xslicer
Count = namedtuple('Count', 'least most overall')
MinMax = namedtuple('MinMax', 'min max')
def xaverage(iterable):
'''
Discover average value of numbers in `iterable`.
:argument iterable: iterable object
:return: a number
>>> from blade.xmath import xaverage
>>> xaverage([10, 40, 45])
31.666666666666668
'''
i1, i2 = tee(iterable)
return truediv(sum(i1, 0.0), count(i2))
def xcount(iterable):
'''
Discover how common each item in `iterable` is and the overall count of each
item in `iterable`.
:argument iterable: iterable object
:return: Collects :func:`~collections.namedtuple` ``Count(least=int,
most=int, overall=[(thing1, int), (thing2, int), ...])``
>>> from blade.xmath import xcount
>>> common = xcount([11, 3, 5, 11, 7, 3, 5, 11])
>>> # least common thing
>>> common.least
7
>>> # most common thing
>>> common.most
11
>>> # total count for every thing
>>> common.overall
[(11, 3), (3, 2), (5, 2), (7, 1)]
'''
cnt = Counter(iterable).most_common
commonality = cnt()
return Count(
# least common
commonality[:-2:-1][0][0],
# most common (mode)
cnt(1)[0][0],
# overall commonality
commonality,
)
def xmedian(iterable):
'''
Discover median value of numbers in `iterable`.
:argument iterable: iterable object
:return: a number
>>> from blade.xmath import xmedian
>>> xmedian([4, 5, 7, 2, 1])
4
>>> xmedian([4, 5, 7, 2, 1, 8])
4.5
'''
i1, i2 = tee(sorted(iterable))
result = truediv(count(i1) - 1, 2)
pint = int(result)
    # an integral midpoint means an odd number of values, i.e. a single middle item
    if result == pint:
        return xslicer(i2, pint)
i3, i4 = tee(i2)
return truediv(xslicer(i3, pint) + xslicer(i4, pint + 1), 2)
def xminmax(iterable):
'''
Discover the minimum and maximum values among items in `iterable`.
:argument iterable: iterable object
:return: :func:`~collections.namedtuple` ``MinMAx(min=value, max=value)``.
>>> from blade.xmath import xminmax
>>> minmax = xminmax([1, 2, 4])
>>> minmax.min
1
>>> minmax.max
4
'''
i1, i2 = tee(iterable)
return MinMax(min(i1), max(i2))
def xinterval(iterable):
'''
Discover the length of the smallest interval that can contain the value of
every items in `iterable`.
:argument iterable: iterable object
:return: a number
>>> from blade.xmath import xinterval
>>> xinterval([3, 5, 7, 3, 11])
8
'''
i1, i2 = tee(sorted(iterable))
return deque(i1, maxlen=1).pop() - next(i2)
def xsum(iterable, start=0, precision=False):
'''
Discover the total value of adding `start` and items in `iterable` together.
:argument iterable: iterable object
:keyword start: starting number
:type start: :func:`int` or :func:`float`
:keyword bool precision: add floats with extended precision
>>> from blade.xmath import xsum
>>> # default behavior
>>> xsum([1, 2, 3])
6
>>> # with a starting mumber
>>> xsum([1, 2, 3], start=1)
7
>>> # add floating points with extended precision
>>> xsum([.1, .1, .1, .1, .1, .1, .1, .1], precision=True)
0.8
'''
return fsum(iterable) if precision else sum(iterable, start)
| lcrees/blade | blade/xmath.py | Python | bsd-3-clause | 3,615 |
"""Audit Services Classes."""
import logging
from .audit_records import AuditRecords
logging.debug("In the audit_services __init__.py file.")
__all__ = ["AuditRecords"]
| daxm/fmcapi | fmcapi/api_objects/audit_services/__init__.py | Python | bsd-3-clause | 172 |
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic import RedirectView
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include('huxley.api.urls', app_name='api', namespace='api')),
url(r'^', include('huxley.www.urls', app_name='www', namespace='www')),
)
urlpatterns += patterns('',
url(r'^favicon\.ico$', RedirectView.as_view(url='/static/img/favicon.ico')),
)
| jmosky12/huxley | huxley/urls.py | Python | bsd-3-clause | 648 |
# Packet structure:
# 00 - 0011 0000 - Range
# 01 - 0011 0000 - Digit 4
# 02 - 0011 0000 - Digit 3
# 03 - 0011 0000 - Digit 2
# 04 - 0011 0000 - Digit 1
# 05 - 0011 0000 - Digit 0
# 06 - 0011 1011 - Function
# 07 - 0011 0000 - Status
# 08 - 0011 0000 - Option1
# 09 - 0011 0000 - Option2
# 10 - 0011 1010 - Option3
# 11 - 0011 0000 - Option4
# 12 - 0000 1101 - CR
# 13 - 0000 1010 - LF
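# Worked example (hypothetical packet 31 31 32 33 34 35 3B 30 30 30 3A 30 0D 0A):
# range byte 0x31 -> 1, digit bytes -> 1.2345, function byte 0x3B -> "V",
# so the decoded reading is 1.2345 * 10**1 = 12.345 V.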
import serial, signal, sys
def handler(signum, frame):
    print("Exiting...")
    sys.exit(0)
# Open serial port, 7 data bits, even parity, 19230 baud
port = serial.Serial("/dev/cu.SLAB_USBtoUART", 19230, 7, 'E')
signal.signal(signal.SIGTERM, handler)
while True:
buffer = bytearray(port.read(14))
if buffer[12] != 0x0D or buffer[13] != 0x0A:
        c = ''
        print "lost sync on " + repr(buffer)
        # Read byte-by-byte until the next LF to realign on a packet boundary.
        while c != '\x0A':
            c = port.read(1)
            print "Syncing..." + repr(c)
        print "Synced!"
# Get range
range = (buffer[0] & 0x0F)
# Determine mode
if buffer[6] == 0x30: mode = "A"
elif buffer[6] == 0x31: mode = "Diode"
elif buffer[6] == 0x32: mode = "Hz"
elif buffer[6] == 0x33: mode = "ohm"
elif buffer[6] == 0x35: mode = "Continuity"
elif buffer[6] == 0x36: mode = "F"
elif buffer[6] == 0x3B: mode = "V"
elif buffer[6] == 0x3D: mode = "uA"
elif buffer[6] == 0x3F: mode = "mA"
else:
mode = ''
print("Error in determining function: ", hex(buffer[6]))
if mode == "V" and range == 4:
range = 2
mode = "mV"
elif mode == "F" and range == 0:
range = 1
mode = "nF"
# Digit decoding!
number = (buffer[1] & 0x0F)
number += (buffer[2] & 0x0F) * 10 ** -1
number += (buffer[3] & 0x0F) * 10 ** -2
number += (buffer[4] & 0x0F) * 10 ** -3
number += (buffer[5] & 0x0F) * 10 ** -4
number *= 10 ** range
# Check sign!
if (buffer[7] & 0x04) >> 2:
number *= -1
if mode == "ohm" and (buffer[7] & 0x01):
print "O/L"
else:
print number, mode
| g5pw/Miscellaneous | ReadMeter.py | Python | bsd-3-clause | 2,005 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-28 03:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('metadata', '0006_tag'),
]
operations = [
migrations.AddField(
model_name='link',
name='tags',
field=models.ManyToManyField(to='metadata.Tag'),
),
]
| alphageek-xyz/site | metadata/migrations/0007_link_tags.py | Python | bsd-3-clause | 441 |
import os
import tempfile
import sys
from ..py30compat import unittest
from ..test_backend import BackendBasicTests
from ..util import random_string
from keyring.backends import file
class FileKeyringTests(BackendBasicTests):
def setUp(self):
super(FileKeyringTests, self).setUp()
self.keyring = self.init_keyring()
self.keyring.file_path = self.tmp_keyring_file = tempfile.mktemp()
def tearDown(self):
try:
os.unlink(self.tmp_keyring_file)
except (OSError,):
e = sys.exc_info()[1]
if e.errno != 2: # No such file or directory
raise
def test_encrypt_decrypt(self):
password = random_string(20)
# keyring.encrypt expects bytes
password = password.encode('utf-8')
encrypted = self.keyring.encrypt(password)
self.assertEqual(password, self.keyring.decrypt(encrypted))
class UncryptedFileKeyringTestCase(FileKeyringTests, unittest.TestCase):
def init_keyring(self):
return file.PlaintextKeyring()
@unittest.skipIf(sys.platform == 'win32',
"Group/World permissions aren't meaningful on Windows")
def test_keyring_not_created_world_writable(self):
"""
Ensure that when keyring creates the file that it's not overly-
permissive.
"""
self.keyring.set_password('system', 'user', 'password')
self.assertTrue(os.path.exists(self.keyring.file_path))
group_other_perms = os.stat(self.keyring.file_path).st_mode & 0077
self.assertEqual(group_other_perms, 0)
| robinson96/GRAPE | keyring/keyring/tests/backends/test_file.py | Python | bsd-3-clause | 1,593 |
# coding: utf-8
# PYTHON IMPORTS
import os, re
from types import MethodType
# DJANGO IMPORTS
from django.shortcuts import render_to_response, HttpResponse
from django.template import RequestContext as Context
from django.http import HttpResponseRedirect, HttpResponseBadRequest, Http404
from django.contrib.admin.views.decorators import staff_member_required
from django.views.decorators.cache import never_cache
from django.utils.translation import ugettext as _
from django import forms
from django.core.urlresolvers import reverse, get_urlconf, get_resolver
from django.core.paginator import Paginator, InvalidPage, EmptyPage
from django.utils.encoding import smart_unicode
from django.contrib import messages
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from django.core.files.base import ContentFile
from django.core.files.storage import DefaultStorage, default_storage, FileSystemStorage
from django.core.exceptions import ImproperlyConfigured
# FILEBROWSER IMPORTS
from filebrowser.settings import *
from filebrowser.functions import get_breadcrumbs, get_filterdate, get_settings_var, handle_file_upload, convert_filename
from filebrowser.templatetags.fb_tags import query_helper
from filebrowser.base import FileListing, FileObject
from filebrowser.decorators import path_exists, file_exists
from filebrowser.storage import FileSystemStorageMixin, StorageMixin
from filebrowser import signals
# Add some required methods to FileSystemStorage
if FileSystemStorageMixin not in FileSystemStorage.__bases__:
FileSystemStorage.__bases__ += (FileSystemStorageMixin,)
# PIL import
if STRICT_PIL:
from PIL import Image
else:
try:
from PIL import Image
except ImportError:
import Image
# JSON import
try:
import json
except ImportError:
from django.utils import simplejson as json
# This cache contains all *instantiated* FileBrowser sites
_sites_cache = {}
def get_site_dict(app_name='filebrowser'):
"""
Return a dict with all *deployed* FileBrowser sites that have
a given app_name.
"""
if not _sites_cache.has_key(app_name):
return {}
# Get names of all deployed filebrowser sites with a give app_name
deployed = get_resolver(get_urlconf()).app_dict[app_name]
# Get the deployed subset from the cache
return dict((k,v) for k, v in _sites_cache[app_name].iteritems() if k in deployed)
def register_site(app_name, site_name, site):
"""
Add a site into the site dict.
"""
if not _sites_cache.has_key(app_name):
_sites_cache[app_name] = {}
_sites_cache[app_name][site_name] = site
def get_default_site(app_name='filebrowser'):
"""
Returns the default site. This function uses Django's url resolution method to
obtain the name of the default site.
"""
# Get the name of the default site:
resolver = get_resolver(get_urlconf())
name = 'filebrowser'
# Django's default name resolution method (see django.core.urlresolvers.reverse())
app_list = resolver.app_dict[app_name]
    if name not in app_list:
name = app_list[0]
return get_site_dict()[name]
class FileBrowserSite(object):
def __init__(self, name=None, app_name='filebrowser', storage=default_storage):
self.name = name
self.app_name = app_name
self.storage = storage
self._actions = {}
self._global_actions = self._actions.copy()
# Register this site in the global site cache
register_site(self.app_name, self.name, self)
# Per-site settings:
self.directory = DIRECTORY
def _directory_get(self):
return self._directory
def _directory_set(self, val):
self._directory = val
directory = property(_directory_get, _directory_set)
def filebrowser_view(self, view):
return staff_member_required(never_cache(view))
def get_urls(self):
from django.conf.urls.defaults import patterns, url, include
urlpatterns = patterns('',
# filebrowser urls (views)
url(r'^browse/$', path_exists(self, self.filebrowser_view(self.browse)), name="fb_browse"),
url(r'^createdir/', path_exists(self, self.filebrowser_view(self.createdir)), name="fb_createdir"),
url(r'^upload/', path_exists(self, self.filebrowser_view(self.upload)), name="fb_upload"),
url(r'^delete_confirm/$', file_exists(self, path_exists(self, self.filebrowser_view(self.delete_confirm))), name="fb_delete_confirm"),
url(r'^delete/$', file_exists(self, path_exists(self, self.filebrowser_view(self.delete))), name="fb_delete"),
url(r'^detail/$', file_exists(self, path_exists(self, self.filebrowser_view(self.detail))), name="fb_detail"),
url(r'^version/$', file_exists(self, path_exists(self, self.filebrowser_view(self.version))), name="fb_version"),
# non-views
url(r'^upload_file/$', staff_member_required(csrf_exempt(self._upload_file)), name="fb_do_upload"),
)
return urlpatterns
def add_action(self, action, name=None):
"""
Register an action to be available globally.
"""
name = name or action.__name__
# Check/create short description
if not hasattr(action, 'short_description'):
action.short_description = action.__name__.replace("_", " ").capitalize()
# Check/create applies-to filter
if not hasattr(action, 'applies_to'):
action.applies_to = lambda x: True
self._actions[name] = action
self._global_actions[name] = action
def disable_action(self, name):
"""
Disable a globally-registered action. Raises KeyError for invalid names.
"""
del self._actions[name]
def get_action(self, name):
"""
        Explicitly get a registered global action whether it's enabled or
not. Raises KeyError for invalid names.
"""
return self._global_actions[name]
def applicable_actions(self, fileobject):
"""
Return a list of tuples (name, action) of actions applicable to a given fileobject.
"""
res = []
for name, action in self.actions:
if action.applies_to(fileobject):
res.append((name, action))
return res
@property
def actions(self):
"""
Get all the enabled actions as a list of (name, func). The list
        is sorted alphabetically by action names.
"""
res = self._actions.items()
res.sort(key=lambda name_func: name_func[0])
return res
@property
def urls(self):
return self.get_urls(), self.app_name, self.name
def browse(self, request):
"""
Browse Files/Directories.
"""
filter_re = []
for exp in EXCLUDE:
filter_re.append(re.compile(exp))
for k,v in VERSIONS.iteritems():
exp = (r'_%s(%s)') % (k, '|'.join(EXTENSION_LIST))
filter_re.append(re.compile(exp))
def filter_browse(item):
filtered = item.filename.startswith('.')
for re_prefix in filter_re:
if re_prefix.search(item.filename):
filtered = True
if filtered:
return False
return True
query = request.GET.copy()
path = u'%s' % os.path.join(self.directory, query.get('dir', ''))
filelisting = FileListing(path,
filter_func=filter_browse,
sorting_by=query.get('o', DEFAULT_SORTING_BY),
sorting_order=query.get('ot', DEFAULT_SORTING_ORDER),
site=self)
files = []
if SEARCH_TRAVERSE and query.get("q"):
listing = filelisting.files_walk_filtered()
else:
listing = filelisting.files_listing_filtered()
# If we do a search, precompile the search pattern now
do_search = query.get("q")
if do_search:
re_q = re.compile(query.get("q").lower(), re.M)
filter_type = query.get('filter_type')
filter_date = query.get('filter_date')
for fileobject in listing:
# date/type filter
append = False
if (not filter_type or fileobject.filetype == filter_type) and (not filter_date or get_filterdate(filter_date, fileobject.date or 0)):
append = True
# search
if do_search and not re_q.search(fileobject.filename.lower()):
append = False
# append
if append:
files.append(fileobject)
filelisting.results_total = len(listing)
filelisting.results_current = len(files)
p = Paginator(files, LIST_PER_PAGE)
page_nr = request.GET.get('p', '1')
try:
page = p.page(page_nr)
except (EmptyPage, InvalidPage):
page = p.page(p.num_pages)
return render_to_response('filebrowser/index.html', {
'p': p,
'page': page,
'filelisting': filelisting,
'query': query,
'title': _(u'FileBrowser'),
'settings_var': get_settings_var(directory=self.directory),
'breadcrumbs': get_breadcrumbs(query, query.get('dir', '')),
'breadcrumbs_title': "",
'filebrowser_site': self
}, context_instance=Context(request, current_app=self.name))
def createdir(self, request):
"""
Create Directory.
"""
from filebrowser.forms import CreateDirForm
query = request.GET
path = u'%s' % os.path.join(self.directory, query.get('dir', ''))
if request.method == 'POST':
form = CreateDirForm(path, request.POST, filebrowser_site=self)
if form.is_valid():
server_path = os.path.join(path, form.cleaned_data['name'])
try:
signals.filebrowser_pre_createdir.send(sender=request, path=server_path, name=form.cleaned_data['name'], site=self)
self.storage.makedirs(server_path)
# os.mkdir(server_path)
# os.chmod(server_path, 0775) # ??? PERMISSIONS
signals.filebrowser_post_createdir.send(sender=request, path=server_path, name=form.cleaned_data['name'], site=self)
messages.add_message(request, messages.SUCCESS, _('The Folder %s was successfully created.') % form.cleaned_data['name'])
redirect_url = reverse("filebrowser:fb_browse", current_app=self.name) + query_helper(query, "ot=desc,o=date", "ot,o,filter_type,filter_date,q,p")
return HttpResponseRedirect(redirect_url)
except OSError, (errno, strerror):
if errno == 13:
form.errors['name'] = forms.util.ErrorList([_('Permission denied.')])
else:
form.errors['name'] = forms.util.ErrorList([_('Error creating folder.')])
else:
form = CreateDirForm(path, filebrowser_site=self)
return render_to_response('filebrowser/createdir.html', {
'form': form,
'query': query,
'title': _(u'New Folder'),
'settings_var': get_settings_var(directory=self.directory),
'breadcrumbs': get_breadcrumbs(query, query.get('dir', '')),
'breadcrumbs_title': _(u'New Folder'),
'filebrowser_site': self
}, context_instance=Context(request, current_app=self.name))
def upload(self, request):
"""
Multipe File Upload.
"""
query = request.GET
path = u'%s' % os.path.join(self.directory, query.get('dir', ''))
return render_to_response('filebrowser/upload.html', {
'query': query,
'title': _(u'Select files to upload'),
'settings_var': get_settings_var(directory=self.directory),
'breadcrumbs': get_breadcrumbs(query, query.get('dir', '')),
'breadcrumbs_title': _(u'Upload'),
'filebrowser_site': self
}, context_instance=Context(request, current_app=self.name))
def delete_confirm(self, request):
"""
Delete existing File/Directory.
"""
query = request.GET
path = u'%s' % os.path.join(self.directory, query.get('dir', ''))
fileobject = FileObject(os.path.join(path, query.get('filename', '')), site=self)
if fileobject.filetype == "Folder":
filelisting = FileListing(os.path.join(path, fileobject.filename),
sorting_by=query.get('o', 'filename'),
sorting_order=query.get('ot', DEFAULT_SORTING_ORDER),
site=self)
filelisting = filelisting.files_walk_total()
if len(filelisting) > 100:
additional_files = len(filelisting) - 100
filelisting = filelisting[:100]
else:
additional_files = None
else:
filelisting = None
additional_files = None
return render_to_response('filebrowser/delete_confirm.html', {
'fileobject': fileobject,
'filelisting': filelisting,
'additional_files': additional_files,
'query': query,
'title': _(u'Confirm delete'),
'settings_var': get_settings_var(directory=self.directory),
'breadcrumbs': get_breadcrumbs(query, query.get('dir', '')),
'breadcrumbs_title': _(u'Confirm delete'),
'filebrowser_site': self
}, context_instance=Context(request, current_app=self.name))
def delete(self, request):
"""
Delete existing File/Directory.
"""
query = request.GET
path = u'%s' % os.path.join(self.directory, query.get('dir', ''))
fileobject = FileObject(os.path.join(path, query.get('filename', '')), site=self)
if request.GET:
try:
signals.filebrowser_pre_delete.send(sender=request, path=fileobject.path, name=fileobject.filename, site=self)
fileobject.delete_versions()
fileobject.delete()
signals.filebrowser_post_delete.send(sender=request, path=fileobject.path, name=fileobject.filename, site=self)
messages.add_message(request, messages.SUCCESS, _('Successfully deleted %s') % fileobject.filename)
except OSError, (errno, strerror):
# TODO: define error-message
pass
redirect_url = reverse("filebrowser:fb_browse", current_app=self.name) + query_helper(query, "", "filename,filetype")
return HttpResponseRedirect(redirect_url)
def detail(self, request):
"""
Show detail page for a file.
Rename existing File/Directory (deletes existing Image Versions/Thumbnails).
"""
from filebrowser.forms import ChangeForm
query = request.GET
path = u'%s' % os.path.join(self.directory, query.get('dir', ''))
fileobject = FileObject(os.path.join(path, query.get('filename', '')), site=self)
if request.method == 'POST':
form = ChangeForm(request.POST, path=path, fileobject=fileobject, filebrowser_site=self)
if form.is_valid():
new_name = form.cleaned_data['name']
action_name = form.cleaned_data['custom_action']
try:
action_response = None
if action_name:
action = self.get_action(action_name)
# Pre-action signal
signals.filebrowser_actions_pre_apply.send(sender=request, action_name=action_name, fileobject=[fileobject], site=self)
# Call the action to action
action_response = action(request=request, fileobjects=[fileobject])
# Post-action signal
signals.filebrowser_actions_post_apply.send(sender=request, action_name=action_name, fileobject=[fileobject], result=action_response, site=self)
if new_name != fileobject.filename:
signals.filebrowser_pre_rename.send(sender=request, path=fileobject.path, name=fileobject.filename, new_name=new_name, site=self)
fileobject.delete_versions()
self.storage.move(fileobject.path, os.path.join(fileobject.head, new_name))
signals.filebrowser_post_rename.send(sender=request, path=fileobject.path, name=fileobject.filename, new_name=new_name, site=self)
messages.add_message(request, messages.SUCCESS, _('Renaming was successful.'))
if isinstance(action_response, HttpResponse):
return action_response
if "_continue" in request.POST:
redirect_url = reverse("filebrowser:fb_detail", current_app=self.name) + query_helper(query, "filename="+new_name, "filename")
else:
redirect_url = reverse("filebrowser:fb_browse", current_app=self.name) + query_helper(query, "", "filename")
return HttpResponseRedirect(redirect_url)
except OSError, (errno, strerror):
form.errors['name'] = forms.util.ErrorList([_('Error.')])
else:
form = ChangeForm(initial={"name": fileobject.filename}, path=path, fileobject=fileobject, filebrowser_site=self)
return render_to_response('filebrowser/detail.html', {
'form': form,
'fileobject': fileobject,
'query': query,
'title': u'%s' % fileobject.filename,
'settings_var': get_settings_var(directory=self.directory),
'breadcrumbs': get_breadcrumbs(query, query.get('dir', '')),
'breadcrumbs_title': u'%s' % fileobject.filename,
'filebrowser_site': self
}, context_instance=Context(request, current_app=self.name))
def version(self, request):
"""
Version detail.
"""
query = request.GET
path = u'%s' % os.path.join(self.directory, query.get('dir', ''))
fileobject = FileObject(os.path.join(path, query.get('filename', '')), site=self)
return render_to_response('filebrowser/version.html', {
'fileobject': fileobject,
'query': query,
'settings_var': get_settings_var(directory=self.directory),
'filebrowser_site': self
}, context_instance=Context(request, current_app=self.name))
def _upload_file(self, request):
"""
Upload file to the server.
"""
if request.method == "POST":
folder = request.GET.get('folder', '')
if request.is_ajax(): # Advanced (AJAX) submission
filedata = ContentFile(request.raw_post_data)
else: # Basic (iframe) submission
if len(request.FILES) != 1:
raise Http404('Invalid request! Multiple files included.')
filedata = request.FILES.values()[0]
try:
filedata.name = convert_filename(request.GET['qqfile'])
except KeyError:
return HttpResponseBadRequest('Invalid request! No filename given.')
fb_uploadurl_re = re.compile(r'^.*(%s)' % reverse("filebrowser:fb_upload", current_app=self.name))
folder = fb_uploadurl_re.sub('', folder)
path = os.path.join(self.directory, folder)
file_name = os.path.join(path, filedata.name)
file_already_exists = self.storage.exists(file_name)
# Check for name collision with a directory
if file_already_exists and self.storage.isdir(file_name):
ret_json = {'success': False, 'filename': filedata.name}
return HttpResponse(json.dumps(ret_json))
signals.filebrowser_pre_upload.send(sender=request, path=request.POST.get('folder'), file=filedata, site=self)
uploadedfile = handle_file_upload(path, filedata, site=self)
if file_already_exists and OVERWRITE_EXISTING:
old_file = smart_unicode(file_name)
new_file = smart_unicode(uploadedfile)
self.storage.move(new_file, old_file, allow_overwrite=True)
else:
file_name = smart_unicode(uploadedfile)
signals.filebrowser_post_upload.send(sender=request, path=request.POST.get('folder'), file=FileObject(smart_unicode(file_name), site=self), site=self)
# let Ajax Upload know whether we saved it or not
ret_json = {'success': True, 'filename': filedata.name}
return HttpResponse(json.dumps(ret_json))
storage = DefaultStorage()
storage.location = MEDIA_ROOT
storage.base_url = MEDIA_URL
# Default FileBrowser site
site = FileBrowserSite(name='filebrowser', storage=storage)
# Default actions
from actions import *
site.add_action(flip_horizontal)
site.add_action(flip_vertical)
site.add_action(rotate_90_clockwise)
site.add_action(rotate_90_counterclockwise)
site.add_action(rotate_180)
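# Deployment sketch (assumes a standard Django project of this era; names are
# illustrative, not part of this module):
#
#     # project urls.py
#     from django.conf.urls.defaults import patterns, include, url
#     from filebrowser.sites import site as filebrowser_site
#
#     urlpatterns = patterns('',
#         url(r'^admin/filebrowser/', include(filebrowser_site.urls)),
#     )
#
# Custom per-file actions register like the defaults above:
#
#     def make_grayscale(request, fileobjects):
#         pass  # transform each FileObject here
#     make_grayscale.short_description = 'Convert to grayscale'
#     make_grayscale.applies_to = lambda f: f.filetype == 'Image'
#     site.add_action(make_grayscale)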
| klueska/django-filebrowser | filebrowser/sites.py | Python | bsd-3-clause | 21,588 |
from django.forms import ValidationError
from cyder.cydns.domain.models import Domain
from cyder.cydns.views import CydnsCreateView
from cyder.cydns.views import CydnsDeleteView
from cyder.cydns.views import CydnsDetailView
from cyder.cydns.views import CydnsListView
from cyder.cydns.views import CydnsUpdateView
| ngokevin/cyder | cyder/cydns/nameserver/views.py | Python | bsd-3-clause | 315 |
#!/usr/bin/env python
##
## Copyright 2016 SRI International
## See COPYING file distributed along with the package for the copyright and license terms.
##
"""
Neuroradiology Findings
Script to sync and generate reports on findings from radiology readings
Examples
========
- Findings and Findings Date is empty before a given date
./neurorad_findings --update --report-type no_findings_before_date --before-date 2015-06-08
Report Types
===========
no_findings_date - no findings date is listed but there is a finding
no_findings - no finding but a finding date is listed
no_findings_or_date - no findings or findings date listed
no_findings_before_date - filters no_findings_or_date by date
"""
__author__ = 'Nolan Nichols <https://orcid.org/0000-0003-1099-3328>'
__modified__ = "2015-08-31"
import os
import pandas as pd
import xnat_extractor as xe
verbose = None
def set_experiment_attrs(config, project, subject, experiment, key, value):
"""
Set the field for an MRSession
For example, datetodvd, findingsdate, findings
:param config: str
:param project: str
:param subject: str
:param experiment: str
:param key: str
:param value: str
:return: str
"""
config = xe.get_config(config)
session = xe.get_xnat_session(config)
api = config.get('api')
path = '{}/projects/{}/subjects/{}/experiments/{}'.format(api, project, subject, experiment)
xsi = 'xnat:mrSessionData'
field = 'xnat:mrSessionData/fields/field[name={}]/field'.format(key)
payload = {'xsiType': xsi, field: value}
return session.put(path, params=payload)
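# Example call (hypothetical config path and project/subject/experiment ids):
#     set_experiment_attrs('~/.server_config/ncanda.cfg', 'ncanda_data',
#                          'NCANDA_S00001', 'NCANDA_E00001',
#                          'findings', 'No significant findings.')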
def update_findings_date(config, merged_findings):
"""
For all records found, set the findings date attribute to datatodvd
:param config: dict
:param merged_findings: pd.DataFrame
:return:
"""
for idx, row in merged_findings.iterrows():
result = set_experiment_attrs(config, row.project, row.subject_id, row.experiment_id, 'findingsdate', row.datetodvd)
if verbose:
print("Updated experiment: {}".format(result))
return
def findings_date_empty(df):
"""
Find all experiments that have a finding recorded but no date entered.
:param df: pd.DataFrame
:return: pd.DataFrame
"""
has_finding = df[~df.findings.isnull()]
no_findings_date = has_finding[has_finding.findingsdate.isnull()]
return no_findings_date
def findings_empty(df):
"""
Find all experiments that have a finding date recorded but no finding entered.
:param df: pd.DataFrame
:return: pd.DataFrame
"""
has_findings_date = df[~df.findingsdate.isnull()]
no_findings_date = has_findings_date[has_findings_date.findings.isnull()]
return no_findings_date
def findings_and_date_empty(df):
"""
Find all experiments that have empty findings date and findings.
:param df: pd.DataFrame
:return: pd.DataFrame
"""
no_findings_or_date = df[(df.findings.isnull()) & (df.findingsdate.isnull())]
return no_findings_or_date
def check_dvdtodate_before_date(df, before_date=None):
"""
Find all experiments that have a datetodvd before a given date.
Also convert date from string to datetime (YYYY-MM-DD)
:param df: pd.DataFrame
:return: pd.DataFrame
"""
has_datetodvd = df[~df.datetodvd.isnull()]
has_datetodvd.loc[:, 'datetodvd'] = has_datetodvd.datetodvd.astype('datetime64')
date = pd.Timestamp(before_date)
return has_datetodvd[has_datetodvd.datetodvd < date]
def inner_join_dataframes(df1, df2):
"""
Join two dataframes using an inner join
:param df1: pd.DataFrame
:param df2: pd.DataFrame
:return: pd.DataFrame
"""
return pd.merge(df1, df2, how='inner')
def main(args=None):
config = xe.get_config(args.config)
session = xe.get_xnat_session(config)
if args.update:
# Update the cache of XNAT Experiment XML files
xe.extract_experiment_xml(config, session,
args.experimentsdir, args.num_extract)
# extract info from the experiment XML files
experiment = xe.get_experiments_dir_info(args.experimentsdir)
experiment_df = xe.experiments_to_dataframe(experiment)
reading = xe.get_experiments_dir_reading_info(args.experimentsdir)
reading_df = xe.reading_to_dataframe(reading)
experiment_reading = inner_join_dataframes(experiment_df, reading_df)
# exclude phantoms, but include the traveling human phantoms
site_id_pattern = '[A-EX]-[0-9]{5}-[MFT]-[0-9]'
df = experiment_reading[experiment_reading.site_id.str.contains(site_id_pattern)]
result = None
if args.report_type == 'no_findings_date':
# Findings are listed without a findings date
result = findings_date_empty(df)
if args.set_findings_date:
# Update the findings date to equal the date to dvd
update_findings_date(args.config, result)
elif args.report_type == 'no_findings':
# Findings is empty but a date is listed
result = findings_empty(df)
elif args.report_type == 'no_findings_or_date':
# Both the findings and findings date are empty
result = findings_and_date_empty(df)
if args.reset_datetodvd:
            experiment = args.reset_datetodvd
            record = result[result.experiment_id == experiment]
            project = record.project.values[0]
            subject = record.subject_id.values[0]
            set_experiment_attrs(args.config, project, subject, experiment, 'datetodvd', 'none')
elif args.report_type == 'correct_dvd_date':
dates_df = pd.read_csv(args.file_to_reset_datetodvd)
result = pd.DataFrame(index=['Subject'], columns=['project', 'subject_id', 'experiment_id',
'site_experiment_id', 'datetodvd', 'findingsdate'])
result = result.fillna(0)
for subject in df['subject_id'].tolist():
if subject in dates_df['mri_xnat_sid'].tolist():
if args.verbose:
print "Checking for {}".format(subject)
eids = dates_df[dates_df['mri_xnat_sid'] == subject]['mri_xnat_eids'].tolist()
date = dates_df[dates_df['mri_xnat_sid'] == subject]['mri_datetodvd'].tolist()
if eids != []:
if len(eids[0]) == 13:
experiment = eids[0]
record = df[df.experiment_id == experiment]
record_date = record['datetodvd'].tolist()
if date != [] and record_date != []:
                        if record_date[0] != date[0] or type(record_date[0]) != str:
project = record.project.values[0]
subject = record.subject_id.values[0]
experiment = record.experiment_id.values[0]
set_experiment_attrs(args.config, project, subject, experiment, 'datetodvd', date[0])
elif len(eids[0]) == 27 or eids == None:
experiment = eids[0].split(" ")
for e in experiment:
                        record = df[df.experiment_id == e]
                        record_date = record['datetodvd'].tolist()
if date != [] and record_date != []:
                            if record_date[0] != date[0] or type(record_date[0]) == str:
project = record.project.values[0]
subject = record.subject_id.values[0]
set_experiment_attrs(args.config, project, subject, e, 'datetodvd', date[0])
elif args.report_type == 'no_findings_before_date':
# Findings and Findings Date is empty before a given date
if not args.before_date:
raise(Exception("Please set --before-date YYYY-MM-DD when running the no_findings_before_date report."))
has_dvd_before_date = check_dvdtodate_before_date(df, before_date=args.before_date)
result = findings_and_date_empty(has_dvd_before_date)
else:
raise(NotImplementedError("The report you entered is not in the list."))
result.to_csv(args.outfile,
columns=['project', 'subject_id', 'experiment_id',
'site_experiment_id', 'datetodvd', 'findingsdate'],
index=False)
if verbose:
pd.set_option('display.max_rows', len(result))
print("Total records found: {}".format(len(result)))
print(result[['experiment_id', 'site_experiment_id']])
pd.reset_option('display.max_rows')
print("Finished!")
if __name__ == "__main__":
import sys
import argparse
formatter = argparse.RawDescriptionHelpFormatter
default = 'default: %(default)s'
parser = argparse.ArgumentParser(prog="neurorad_findings.py",
description=__doc__,
formatter_class=formatter)
parser.add_argument('-b', '--before-date',
type=str,
help='To be used with --report_type no_findings_before_date. YYYY-MM-DD')
parser.add_argument('-c', '--config',
type=str,
default=os.path.join(os.path.expanduser('~'),
'.server_config', 'ncanda.cfg'))
parser.add_argument('-d', '--reset_datetodvd',
help='Reset the datetodvd to None for a given XNAT experiment id (e.g., NCANDA_E12345)')
parser.add_argument('-e', '--experimentsdir',
type=str,
default='/tmp/experiments',
help='Name of experiments xml directory')
parser.add_argument('-f', '--file_to_reset_datetodvd',
action='store',
help='CSV file used to reset the datetodvd to a specific date for a given XNAT experiment id (e.g., NCANDA_E12345)')
parser.add_argument('-o', '--outfile',
type=str,
default='/tmp/neurorad_findings.csv',
help='Name of csv file to write.')
parser.add_argument('-n', '--num-extract',
type=int,
help='Number of sessions to extract')
parser.add_argument('-r', '--report-type',
type=str,
required=True,
choices=['no_findings_date', 'no_findings', 'no_findings_or_date', 'no_findings_before_date','correct_dvd_date'],
help='Select a report type. Note that no_findings_before_date requires --before_date.')
parser.add_argument('-s', '--set-findings-date',
action='store_true',
help='If findings are reported and the findings date is None then set it to the date of dvd.')
parser.add_argument('-u', '--update',
action='store_true',
help='Update the cache of xml files')
parser.add_argument('-v', '--verbose',
action='store_true',
help='Print verbose output.')
argv = parser.parse_args()
verbose = argv.verbose
#xe.verbose = argv.verbose
sys.exit(main(args=argv))
| abonil91/ncanda-data-integration | scripts/xnat/neurorad_findings.py | Python | bsd-3-clause | 11,460 |
r"""
Laplace equation with Dirichlet boundary conditions given by a sine function
and constants.
Find :math:`t` such that:
.. math::
\int_{\Omega} c \nabla s \cdot \nabla t
= 0
\;, \quad \forall s \;.
This example demonstrates how to use a hierarchical basis approximation - it
uses the fifth order Lobatto polynomial space for the solution. The adaptive
linearization is applied in order to save viewable results, see both the
options keyword and the ``post_process()`` function that computes the solution
gradient. Use the following commands to view the results (assuming default
output directory and names)::
$ ./postproc.py -b -d't,plot_warp_scalar,rel_scaling=1' 2_4_2_refined_t.vtk --wireframe
$ ./postproc.py -b 2_4_2_refined_grad.vtk
The :class:`sfepy.discrete.fem.meshio.UserMeshIO` class is used to refine the original
two-element mesh before the actual solution.
"""
from __future__ import absolute_import
import numpy as nm
from sfepy import data_dir
from sfepy.base.base import output
from sfepy.discrete.fem import Mesh, FEDomain
from sfepy.discrete.fem.meshio import UserMeshIO, MeshIO
from sfepy.homogenization.utils import define_box_regions
from six.moves import range
base_mesh = data_dir + '/meshes/elements/2_4_2.mesh'
def mesh_hook(mesh, mode):
"""
Load and refine a mesh here.
"""
if mode == 'read':
mesh = Mesh.from_file(base_mesh)
domain = FEDomain(mesh.name, mesh)
for ii in range(3):
output('refine %d...' % ii)
domain = domain.refine()
output('... %d nodes %d elements'
% (domain.shape.n_nod, domain.shape.n_el))
domain.mesh.name = '2_4_2_refined'
return domain.mesh
elif mode == 'write':
pass
def post_process(out, pb, state, extend=False):
"""
Calculate gradient of the solution.
"""
from sfepy.discrete.fem.fields_base import create_expression_output
aux = create_expression_output('ev_grad.ie.Elements( t )',
'grad', 'temperature',
pb.fields, pb.get_materials(),
pb.get_variables(), functions=pb.functions,
mode='qp', verbose=False,
min_level=0, max_level=5, eps=1e-3)
out.update(aux)
return out
filename_mesh = UserMeshIO(mesh_hook)
# Get the mesh bounding box.
io = MeshIO.any_from_filename(base_mesh)
bbox, dim = io.read_bounding_box(ret_dim=True)
options = {
'nls' : 'newton',
'ls' : 'ls',
'post_process_hook' : 'post_process',
'linearization' : {
'kind' : 'adaptive',
'min_level' : 0, # Min. refinement level to achieve everywhere.
'max_level' : 5, # Max. refinement level.
'eps' : 1e-3, # Relative error tolerance.
},
}
materials = {
'coef' : ({'val' : 1.0},),
}
regions = {
'Omega' : 'all',
}
regions.update(define_box_regions(dim, bbox[0], bbox[1], 1e-5))
fields = {
'temperature' : ('real', 1, 'Omega', 5, 'H1', 'lobatto'),
# Compare with the Lagrange basis.
## 'temperature' : ('real', 1, 'Omega', 5, 'H1', 'lagrange'),
}
variables = {
't' : ('unknown field', 'temperature', 0),
's' : ('test field', 'temperature', 't'),
}
amplitude = 1.0
def ebc_sin(ts, coor, **kwargs):
x0 = 0.5 * (coor[:, 1].min() + coor[:, 1].max())
val = amplitude * nm.sin( (coor[:, 1] - x0) * 2. * nm.pi )
return val
ebcs = {
't1' : ('Left', {'t.0' : 'ebc_sin'}),
't2' : ('Right', {'t.0' : -0.5}),
't3' : ('Top', {'t.0' : 1.0}),
}
functions = {
'ebc_sin' : (ebc_sin,),
}
equations = {
'Temperature' : """dw_laplace.10.Omega( coef.val, s, t ) = 0"""
}
solvers = {
'ls' : ('ls.scipy_direct', {}),
'newton' : ('nls.newton', {
'i_max' : 1,
'eps_a' : 1e-10,
}),
}
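# Usage sketch (assumes the standard sfepy runner of this era):
#     ./simple.py examples/diffusion/sinbc.py
# which writes 2_4_2_refined_t.vtk and 2_4_2_refined_grad.vtk for the
# postproc.py commands shown in the module docstring.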
| lokik/sfepy | examples/diffusion/sinbc.py | Python | bsd-3-clause | 3,897 |
import os
use_gpu = os.environ.get('USE_GPU', 'no')
assert use_gpu in ['auto', 'yes', 'no'], "environment variable USE_GPU, should be one of 'auto', 'yes', 'no'."
if use_gpu == 'auto':
try:
import cudamat as cm
use_gpu = 'yes'
except:
print 'Failed to import cudamat. Using eigenmat. No GPU will be used.'
use_gpu = 'no'
if use_gpu == 'yes':
import cudamat as cm
from cudamat import cudamat_conv as cc
from cudamat import gpu_lock
elif use_gpu == 'no':
import eigenmat as cm
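# Usage sketch (assumed invocation): pick the backend via the environment, e.g.
#     USE_GPU=auto python trainer.py
# after which `cm` is bound to cudamat (GPU) or eigenmat (CPU) for any module
# that imports it from here.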
| kobiso/ControlledDropout | deepnet/choose_matrix_library.py | Python | bsd-3-clause | 503 |
import os
import windows
import windows.generated_def as gdef
from windows.debug import symbols
import argparse
parser = argparse.ArgumentParser(prog=__file__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--dbghelp', help='The path of DBG help to use (default use env:PFW_DBGHELP_PATH)')
args = parser.parse_args()
print(args)
if args.dbghelp:
symbols.set_dbghelp_path(args.dbghelp)
else:
if "PFW_DBGHELP_PATH" not in os.environ:
print("Not dbghelp path given and no environ var 'PFW_DBGHELP_PATH' sample may fail")
symbols.engine.options = 0 # Disable defered load
sh = symbols.VirtualSymbolHandler()
ntmod = sh.load_file(r"c:\windows\system32\ntdll.dll", addr=0x420000)
print("Ntdll module is: {0}".format(ntmod))
print(" * name = {0}".format(ntmod.name))
print(" * addr = {0:#x}".format(ntmod.addr))
print(" * path = {0:}".format(ntmod.path))
print(" * type = {0:}".format(ntmod.type))
print(" * pdb = {0:}".format(ntmod.pdb))
print("")
TEST_FUNCTION = "LdrLoadDll"
print("Resolving function <{0}>".format(TEST_FUNCTION))
loaddll = sh["ntdll!" + TEST_FUNCTION]
print("Symbol found !")
print(" * __repr__: {0!r}".format(loaddll))
print(" * __str__: {0}".format(loaddll))
print(" * addr: {0:#x}".format(loaddll.addr))
print(" * name: {0}".format(loaddll.name))
print(" * fullname: {0}".format(loaddll.fullname))
print(" * module: {0}".format(loaddll.module))
print("")
print("Loading kernelbase")
kbasemod = sh.load_file(r"c:\windows\system32\kernelbase.dll", addr=0x1230000)
print("Loaded modules are: {0}".format(sh.modules))
LOOKUP_ADDR = 0x1231242
print("Looking up address: {0:#x}".format(LOOKUP_ADDR))
lookupsym = sh[LOOKUP_ADDR]
print("Symbol resolved !")
print(" * __repr__: {0!r}".format(lookupsym))
print(" * __str__: {0}".format(lookupsym))
print(" * start: {0:#x}".format(lookupsym.start))
print(" * addr: {0:#x}".format(lookupsym.addr))
print(" * displacement: {0:#x}".format(lookupsym.displacement))
print(" * name: {0}".format(lookupsym.name))
print(" * fullname: {0}".format(lookupsym.fullname))
print(" * module: {0}".format(lookupsym.module))
| hakril/PythonForWindows | samples/debug/symbols/virtsymdemo.py | Python | bsd-3-clause | 2,138 |
# -*- coding: utf-8 -*-
# __author__ = chenchiyuan
from __future__ import division, unicode_literals, print_function
from models import Commodity, CommodityDay, CommodityInventory, CommodityProduct
from django.contrib import admin
class CommodityDayAdmin(admin.ModelAdmin):
pass
class CommodityPrivilegeAdmin(admin.ModelAdmin):
pass
class CommodityInventoryAdmin(admin.ModelAdmin):
filter_horizontal = ("days", )
raw_id_fields = ("commodity", )
list_display = ("amount", "inventory_type", "begin", "end", "show_days", "price")
def show_days(self, obj):
days = obj.days.all().values_list("name", flat=True)
return ";".join(days)
    show_days.short_description = u"Valid days"
class CommodityAdmin(admin.ModelAdmin):
class Media:
css = {
"all": ("/static/css/admin-override.css", ),
}
class CommodityProductAdmin(admin.ModelAdmin):
list_display = ("name", "city", "amount", "price", "inventory_type", "remark")
admin.site.register(Commodity, CommodityAdmin)
admin.site.register(CommodityInventory, CommodityInventoryAdmin)
admin.site.register(CommodityProduct, CommodityProductAdmin)
admin.site.register(CommodityDay, CommodityDayAdmin)
| chenchiyuan/hawaii | hawaii/apps/commodity/admin.py | Python | bsd-3-clause | 1,226 |
"""
sentry.views.base
~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from sentry.conf import settings
from sentry.utils import InstanceManager
__all__ = ('View',)
class View(object):
verbose_name = None
verbose_name_plural = None
ref = None # we cache the actual object here
def should_store(self, event):
return False
handlers = InstanceManager(settings.VIEWS)
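# Minimal subclass sketch (hypothetical; the event attribute used below is an
# assumption, not part of this module):
#
#     class ExceptionView(View):
#         verbose_name = 'Exception'
#         verbose_name_plural = 'Exceptions'
#         def should_store(self, event):
#             return 'sentry.interfaces.Exception' in getattr(event, 'interfaces', {})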
| Kronuz/django-sentry | sentry/views/base.py | Python | bsd-3-clause | 489 |